From 6c2fcc7afd36d65d50b262bd590934c56ad9e689 Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Tue, 29 Jul 2025 19:47:05 +0200 Subject: [PATCH 1/7] Fix the bug in QueryLimits initialization in SolrIndexSearcher. Improve the helper class to better track locations and counts where limits are enforced. --- .../apache/solr/search/SolrIndexSearcher.java | 16 ++- .../solr/search/CallerSpecificQueryLimit.java | 121 +++++++++++++++--- .../apache/solr/search/TestQueryLimits.java | 17 +-- 3 files changed, 122 insertions(+), 32 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 0f0735b66acb..029ad312eae9 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -137,16 +137,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI public static final String STATS_SOURCE = "org.apache.solr.stats_source"; public static final String STATISTICS_KEY = "searcher"; + +public static final String EXITABLE_READER_PROPERTY = "solr.useExitableDirectoryReader"; + // These should *only* be used for debugging or monitoring purposes public static final AtomicLong numOpens = new AtomicLong(); public static final AtomicLong numCloses = new AtomicLong(); private static final Map> NO_GENERIC_CACHES = Collections.emptyMap(); private static final SolrCache[] NO_CACHES = new SolrCache[0]; - // If you find this useful, let us know in dev@solr.apache.org. Likely to be removed eventually. 
- private static final boolean useExitableDirectoryReader = - Boolean.getBoolean("solr.useExitableDirectoryReader"); - public static final int EXECUTOR_MAX_CPU_THREADS = Runtime.getRuntime().availableProcessors(); private final SolrCore core; @@ -213,8 +212,11 @@ private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) throws IOException { assert reader != null; reader = UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()); - if (useExitableDirectoryReader) { // SOLR-16693 legacy; may be removed. Probably inefficient. - reader = ExitableDirectoryReader.wrap(reader, QueryLimits.getCurrentLimits()); + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY)) { // SOLR-16693 legacy; may be removed. Probably inefficient. + reader = ExitableDirectoryReader.wrap(reader, () -> { + // If the query limits are exceeded, we should exit the reader. + return QueryLimits.getCurrentLimits().shouldExit(); + }); } return reader; } @@ -775,7 +777,7 @@ public QueryResult search(QueryResult qr, QueryCommand cmd) throws IOException { protected void search(List leaves, Weight weight, Collector collector) throws IOException { QueryLimits queryLimits = QueryLimits.getCurrentLimits(); - if (useExitableDirectoryReader || !queryLimits.isLimitsEnabled()) { + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY) || !queryLimits.isLimitsEnabled()) { // no timeout. 
Pass through to super class super.search(leaves, weight, collector); } else { diff --git a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java index 96a6bd5ee937..46c3089a7751 100644 --- a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java +++ b/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java @@ -17,51 +17,97 @@ package org.apache.solr.search; import java.lang.invoke.MethodHandles; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Helper class to simulate query timeouts at specific points in various components that call {@link * QueryLimits#shouldExit()}. These calling points are identified by the calling class' simple name - * and optionally a method name, e.g. MoreLikeThisComponent or - * ClusteringComponent.finishStage. + * and optionally a method name and the optional maximum count, e.g. MoreLikeThisComponent or + * ClusteringComponent.finishStage, ClusteringComponent.finishStage:100. + *

NOTE: due to implementation details, a plain simpleName expression is disabled when + any simpleName.anyMethod[:NNN] expression is also used for the same class name.

+ *

NOTE 2: when the maximum count is a negative number, e.g. simpleName.someMethod:-1, then + only the number of calls to {@link QueryLimits#shouldExit()} for that expression will be reported, and + no limit will be enforced.

*/ public class CallerSpecificQueryLimit implements QueryLimit { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - final StackWalker stackWalker = + private final StackWalker stackWalker = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE); - final Map> interestingCallers = new HashMap<>(); - public String trippedBy; + // className -> set of method names + private final Map> interestingCallers = new HashMap<>(); + // expr -> initial count + private final Map maxCounts = new HashMap<>(); + // expr -> current count + private final Map callCounts = new HashMap<>(); + private Set trippedBy = new LinkedHashSet<>(); /** * Signal a timeout in places that match the calling classes (and methods). * - * @param callerExprs list of expressions in the format of simpleClassName[.methodName] - * . If the list is empty or null then the first call to {@link #shouldExit()} from any + * @param callerExprs list of expressions in the format of simpleClassName[.methodName][:NNN]. + * If the list is empty or null then the first call to {@link #shouldExit()} from any * caller will match. */ public CallerSpecificQueryLimit(String... 
callerExprs) { if (callerExprs != null && callerExprs.length > 0) { for (String callerExpr : callerExprs) { - String[] clazzMethod = callerExpr.split("\\."); + String[] exprCount = callerExpr.split(":"); + if (exprCount.length > 2) { + throw new RuntimeException("Invalid count in callerExpr: " + callerExpr); + } + String[] clazzMethod = exprCount[0].split("\\."); if (clazzMethod.length > 2) { - throw new RuntimeException("Invalid callerExpr: " + callerExpr); + throw new RuntimeException("Invalid method in callerExpr: " + callerExpr); } Set methods = interestingCallers.computeIfAbsent(clazzMethod[0], c -> new HashSet<>()); if (clazzMethod.length > 1) { methods.add(clazzMethod[1]); } + if (exprCount.length > 1) { + try { + int count = Integer.parseInt(exprCount[1]); + maxCounts.put(exprCount[0], count); + callCounts.put(exprCount[0], new AtomicInteger(0)); + } catch (NumberFormatException e) { + throw new RuntimeException("Invalid count in callerExpr: " + callerExpr, e); + } + } } } } + /** + * Returns the set of caller expressions that were tripped. + */ + public Set getTrippedBy() { + return trippedBy; + } + + /** + * Returns a map of tripped caller expressions to their current call counts. + */ + public Map getCallCounts() { + return callCounts.entrySet().stream() + .collect(Collectors.toMap( + e -> e.getKey() + + (maxCounts.containsKey(e.getKey()) ? ":" + maxCounts.get(e.getKey()) : ""), + e -> e.getValue().get())); + } + @Override public boolean shouldExit() { Optional matchingExpr = @@ -76,14 +122,61 @@ public boolean shouldExit() { } String method = frame.getMethodName(); if (interestingCallers.isEmpty()) { - // any caller is an interesting caller + // any caller is an offending caller + String expr = declaring.getSimpleName() + "." 
+ method; + if (log.isInfoEnabled()) { + log.info("++++ Limit tripped by any first caller: {} ++++", expr); + } + trippedBy.add(expr); + callCounts.computeIfAbsent(expr, k -> new AtomicInteger()).incrementAndGet(); return true; } Set methods = interestingCallers.get(declaring.getSimpleName()); if (methods == null) { + // no class and no methods specified for this class, so skip + return false; + } + // MATCH. Class name was specified, possibly with methods. + // If methods is empty then all methods match, otherwise only the specified methods match. + if (methods.isEmpty() || methods.contains(method)) { + String expr = declaring.getSimpleName(); + if (methods.contains(method)) { + expr = expr + "." + method; + } else { + // even though we don't match/enforce at the method level, still record the method counts to give better insight into the callers + callCounts.computeIfAbsent(declaring.getSimpleName() + "." + method, k -> new AtomicInteger(0)).incrementAndGet(); + } + int currentCount = callCounts.computeIfAbsent(expr, k -> new AtomicInteger(0)).incrementAndGet(); + // check if we have a max count for this expression + if (maxCounts.containsKey(expr)) { + int maxCount = maxCounts.getOrDefault(expr, 0); + // if max count is negative then just report the call count + if (maxCount < 0) { + maxCount = Integer.MAX_VALUE; + } + if (currentCount > maxCount) { + if (log.isInfoEnabled()) { + log.info( + "++++ Limit tripped by caller: {}, current count: {}, max: {} ++++", + expr, + currentCount, + maxCounts.get(expr)); + } + trippedBy.add(expr + ":" + maxCounts.get(expr)); + return true; + } else { + return false; // max count not reached, not tripped yet + } + } else { + trippedBy.add(expr); + if (log.isInfoEnabled()) { + log.info("++++ Limit tripped by caller: {} ++++", expr); + } + return true; // no max count, so tripped on first call + } + } else { return false; } - return methods.isEmpty() || methods.contains(method); }) .map( frame -> @@ -93,12 +186,6 @@ public 
boolean shouldExit() { + "." + frame.getMethodName()) .findFirst()); - if (matchingExpr.isPresent()) { - if (log.isInfoEnabled()) { - log.info("++++ Limit tripped by caller: {} ++++", matchingExpr.get()); - } - trippedBy = matchingExpr.get(); - } return matchingExpr.isPresent(); } diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java index 7b984bbf05ba..edb9a5da5956 100644 --- a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java +++ b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java @@ -26,6 +26,8 @@ import org.junit.BeforeClass; import org.junit.Test; +import java.util.Map; + public class TestQueryLimits extends SolrCloudTestCase { private static final String COLLECTION = "test"; @@ -66,7 +68,7 @@ public void testQueryLimits() throws Exception { "SearchHandler.handleRequestBody", "QueryComponent", "QueryComponent.process", - "FacetComponent.process" + "FacetComponent.process:2" }; for (String matchingExpr : matchingExprTests) { CallerSpecificQueryLimit limit = new CallerSpecificQueryLimit(matchingExpr); @@ -88,13 +90,12 @@ public void testQueryLimits() throws Exception { assertNotNull( "should have partial results for expr " + matchingExpr, rsp.getHeader().get("partialResults")); - if (matchingExpr.contains(".")) { - assertEquals(matchingExpr, limit.trippedBy); - } else { - assertTrue( - "expected result to start with " + matchingExpr + " but was " + limit.trippedBy, - limit.trippedBy.startsWith(matchingExpr)); - } + assertFalse("should have trippedBy info", limit.getTrippedBy().isEmpty()); + assertTrue( + "expected result to start with " + matchingExpr + " but was " + limit.getTrippedBy(), + limit.getTrippedBy().iterator().next().startsWith(matchingExpr)); + Map callCounts = limit.getCallCounts(); + assertTrue("call count should be > 0", callCounts.get(matchingExpr) > 0); } } } From 6cc270f1c3bdec1690198868d10352d91be8a24a Mon Sep 17 00:00:00 2001 
From: Andrzej Bialecki Date: Wed, 30 Jul 2025 13:57:30 +0200 Subject: [PATCH 2/7] Rename the existing test because it was misleading. Create a unit test to verify the use of ExitableDirectoryReader. --- .../apache/solr/search/SolrIndexSearcher.java | 22 +- .../core/ExitableDirectoryReaderTest.java | 234 +++++------------- .../org/apache/solr/core/TimeAllowedTest.java | 225 +++++++++++++++++ 3 files changed, 303 insertions(+), 178 deletions(-) create mode 100644 solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 029ad312eae9..8814b4aba247 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -51,6 +51,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiPostingsEnum; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.QueryTimeout; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; @@ -206,6 +207,22 @@ private static DirectoryReader getReader( } } + private static final class QueryLimitsTimeout implements QueryTimeout { + private static final QueryLimitsTimeout INSTANCE = new QueryLimitsTimeout(); + + private QueryLimitsTimeout() {} + + @Override + public boolean shouldExit() { + return QueryLimits.getCurrentLimits().shouldExit(); + } + + @Override + public String toString() { + return QueryLimits.getCurrentLimits().limitStatusMessage(); + } + } + // TODO: wrap elsewhere and return a "map" from the schema that overrides get() ? 
// this reader supports reopen private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) @@ -213,10 +230,7 @@ private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) assert reader != null; reader = UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()); if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY)) { // SOLR-16693 legacy; may be removed. Probably inefficient. - reader = ExitableDirectoryReader.wrap(reader, () -> { - // If the query limits are exceeded, we should exit the reader. - return QueryLimits.getCurrentLimits().shouldExit(); - }); + reader = ExitableDirectoryReader.wrap(reader, QueryLimitsTimeout.INSTANCE); } return reader; } diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java index f2e4aaa3c8f1..93112f1959e0 100644 --- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java @@ -16,33 +16,19 @@ */ package org.apache.solr.core; -import static org.apache.solr.common.util.Utils.fromJSONString; - -import java.util.Map; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.metrics.MetricsMap; -import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.response.SolrQueryResponse; -import org.junit.BeforeClass; +import org.apache.solr.search.CallerSpecificQueryLimit; +import org.apache.solr.search.SolrIndexSearcher; +import org.apache.solr.util.TestInjection; +import org.junit.After; import org.junit.Test; -/** - * Test that checks that long-running queries are exited by Solr using the SolrQueryTimeoutImpl - * implementation. 
- */ -public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 { +import java.util.Map; +public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 { static final int NUM_DOCS = 100; static final String assertionString = "/response/numFound==" + NUM_DOCS; static final String failureAssertionString = "/responseHeader/partialResults==true]"; - static final String longTimeout = "10000"; - static final String sleep = "2"; - - @BeforeClass - public static void beforeClass() throws Exception { - initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); - createIndex(); - } public static void createIndex() { for (int i = 0; i < NUM_DOCS; i++) { @@ -59,167 +45,67 @@ public static void createIndex() { assertU(commit()); } - @Test - public void testPrefixQuery() throws Exception { - String q = "name:a*"; - assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); - - // do the same query and test for both success, and that the number of documents matched (i.e. - // make sure no partial results were cached) - assertJQ(req("q", q, "timeAllowed", longTimeout), assertionString); - - // this time we should get a query cache hit and hopefully no exception? this may change in the - // future if time checks are put into other places. - - // 2024-4-15: it did change..., and now this fails with 1 or 2 ms and passes with 3ms... I see - // no way this won't be terribly brittle. Maybe TestInjection of some sort to bring this back? - - // assertJQ(req("q", q, "timeAllowed", "2", "sleep", sleep), assertionString); - - // The idea that the request won't time out due to caching is a flawed test methodology, - // It relies on the test running quickly and not stalling. 
The above test should possibly - // be doing something along the lines of this (but we lack api for it) - // - // SolrCores solrCores = ExitableDirectoryReaderTest.h.getCoreContainer().solrCores; - // List cores = solrCores.getCores(); - // for (SolrCore core : cores) { - // if (<<< find the right core >>> ) { - // ((SolrCache)core.getSearcher().get().<<>> - // } - // } - - // now do the same for the filter cache - // 2024-4-15: this still passes probably because *:* is so fast, but it still worries me - assertJQ(req("q", "*:*", "fq", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); - - // make sure that the result succeeds this time, and that a bad filter wasn't cached - assertJQ(req("q", "*:*", "fq", q, "timeAllowed", longTimeout), assertionString); - - // test that Long.MAX_VALUE works - assertJQ(req("q", "name:b*", "timeAllowed", Long.toString(Long.MAX_VALUE)), assertionString); - - // negative timeAllowed should disable timeouts. - assertJQ(req("q", "name:c*", "timeAllowed", "-7"), assertionString); + @After + public void tearDown() throws Exception { + deleteCore(); + super.tearDown(); } - // There are lots of assumptions about how/when cache entries should be changed in this method. - // The simple case above shows the root problem without the confusion. testFilterSimpleCase should - // be removed once it is running and this test should be un-ignored and the assumptions verified. - // With all the weirdness, I'm not going to vouch for this test. Feel free to change it. 
@Test - public void testCacheAssumptions() throws Exception { - String fq = "name:d*"; - SolrCore core = h.getCore(); - MetricsMap filterCacheStats = - (MetricsMap) - ((SolrMetricManager.GaugeWrapper) - core.getCoreMetricManager() - .getRegistry() - .getMetrics() - .get("CACHE.searcher.filterCache")) - .getGauge(); - long fqInserts = (long) filterCacheStats.getValue().get("inserts"); - - MetricsMap queryCacheStats = - (MetricsMap) - ((SolrMetricManager.GaugeWrapper) - core.getCoreMetricManager() - .getRegistry() - .getMetrics() - .get("CACHE.searcher.queryResultCache")) - .getGauge(); - long qrInserts = (long) queryCacheStats.getValue().get("inserts"); - - // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed, and it gets 100 back and the - // for loop below succeeds. - String response = - JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep)); - Map res = (Map) fromJSONString(response); - Map body = (Map) (res.get("response")); - assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); - - Map header = (Map) (res.get("responseHeader")); - assertTrue( - "Should have partial results", - (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); - - assertEquals( - "Should NOT have inserted partial results in the cache!", - (long) queryCacheStats.getValue().get("inserts"), - qrInserts); - - assertEquals( - "Should NOT have another insert", - fqInserts, - (long) filterCacheStats.getValue().get("inserts")); - - // At the end of all this, we should have no hits in the queryResultCache. - response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout)); - - // Check that we did insert this one. 
- assertEquals("Hits should still be 0", (long) filterCacheStats.getValue().get("hits"), 0L); - assertEquals( - "Inserts should be bumped", - (long) filterCacheStats.getValue().get("inserts"), - fqInserts + 1); - - res = (Map) fromJSONString(response); - body = (Map) (res.get("response")); + public void testWithExitableDirectoryReader() throws Exception { + System.setProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY, "true"); + initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); + createIndex(); - assertEquals("Should have exactly " + NUM_DOCS, (long) (body.get("numFound")), NUM_DOCS); - header = (Map) (res.get("responseHeader")); - assertNull( - "Should NOT have partial results", - header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + // create a limit that will not trip but will report calls to shouldExit() + // NOTE: we need to use the inner class name to capture the calls + String callerExpr = "ExitableTermsEnum:-1"; + CallerSpecificQueryLimit queryLimit = new CallerSpecificQueryLimit(callerExpr); + TestInjection.queryTimeout = queryLimit; + String q = "name:a*"; + assertJQ(req("q", q), assertionString); + Map callCounts = queryLimit.getCallCounts(); + assertFalse("there should be some calls from ExitableTermsEnum", callCounts.isEmpty()); + assertTrue("there should be some calls from ExitableTermsEnum: " + callCounts, callCounts.get(callerExpr) > 0); + + // check that the limits are tripped in ExitableDirectoryReader + int maxCount = random().nextInt(10) + 1; + callerExpr = "ExitableTermsEnum:" + maxCount; + queryLimit = new CallerSpecificQueryLimit(callerExpr); + TestInjection.queryTimeout = queryLimit; + // avoid using the cache + q = "name:b*"; + assertJQ(req("q", q), failureAssertionString); + callCounts = queryLimit.getCallCounts(); + assertFalse("there should be some calls from ExitableTermsEnum", callCounts.isEmpty()); + assertTrue("there should be at least " + maxCount + " calls from ExitableTermsEnum: " + callCounts, 
callCounts.get(callerExpr) >= maxCount); } - // When looking at a problem raised on the user's list I ran across this anomaly with timeAllowed - // This tests for the second query NOT returning partial results, along with some other @Test - public void testQueryResults() throws Exception { - String q = "name:e*"; - SolrCore core = h.getCore(); - MetricsMap queryCacheStats = - (MetricsMap) - ((SolrMetricManager.GaugeWrapper) - core.getCoreMetricManager() - .getRegistry() - .getMetrics() - .get("CACHE.searcher.queryResultCache")) - .getGauge(); - Map nl = queryCacheStats.getValue(); - long inserts = (long) nl.get("inserts"); - - String response = JQ(req("q", q, "indent", "true", "timeAllowed", "1", "sleep", sleep)); - - // The queryResultCache should NOT get an entry here. - nl = queryCacheStats.getValue(); - assertEquals("Should NOT have inserted partial results!", inserts, (long) nl.get("inserts")); - - Map res = (Map) fromJSONString(response); - Map body = (Map) (res.get("response")); - Map header = (Map) (res.get("responseHeader")); - - assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); - assertTrue( - "Should have partial results", - (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); - - response = JQ(req("q", q, "indent", "true", "timeAllowed", longTimeout)); - - // Check that we did insert this one. 
- Map nl2 = queryCacheStats.getValue(); - assertEquals("Hits should still be 0", (long) nl.get("hits"), (long) nl2.get("hits")); - assertTrue("Inserts should be bumped", inserts < (long) nl2.get("inserts")); - - res = (Map) fromJSONString(response); - body = (Map) (res.get("response")); - header = (Map) (res.get("responseHeader")); + public void testWithoutExitableDirectoryReader() throws Exception { + System.setProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY, "false"); + initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); + createIndex(); - assertEquals("Should have exactly " + NUM_DOCS, NUM_DOCS, (long) (body.get("numFound"))); - Boolean test = (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); - if (test != null) { - assertFalse("Should NOT have partial results", test); - } + // create a limit that will not trip but will report calls to shouldExit() + // NOTE: we need to use the inner class name to capture the calls + String callerExpr = "ExitableTermsEnum:-1"; + CallerSpecificQueryLimit queryLimit = new CallerSpecificQueryLimit(callerExpr); + TestInjection.queryTimeout = queryLimit; + String q = "name:a*"; + assertJQ(req("q", q), assertionString); + Map callCounts = queryLimit.getCallCounts(); + assertEquals("there should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); + + // check that the limits are not tripped in ExitableDirectoryReader, because it is not used + int maxCount = random().nextInt(10) + 1; + callerExpr = "ExitableTermsEnum:" + maxCount; + queryLimit = new CallerSpecificQueryLimit(callerExpr); + TestInjection.queryTimeout = queryLimit; + // avoid using the cache + q = "name:b*"; + assertJQ(req("q", q), assertionString); + callCounts = queryLimit.getCallCounts(); + assertEquals("there should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); } } diff --git a/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java 
b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java new file mode 100644 index 000000000000..22dc5571bc74 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java @@ -0,0 +1,225 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.core; + +import static org.apache.solr.common.util.Utils.fromJSONString; + +import java.util.Map; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.metrics.MetricsMap; +import org.apache.solr.metrics.SolrMetricManager; +import org.apache.solr.response.SolrQueryResponse; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Test that checks that long-running queries are exited by Solr using the {@link org.apache.solr.search.QueryLimits} + * implementation. 
+ */ +public class TimeAllowedTest extends SolrTestCaseJ4 { + + static final int NUM_DOCS = 100; + static final String assertionString = "/response/numFound==" + NUM_DOCS; + static final String failureAssertionString = "/responseHeader/partialResults==true]"; + static final String longTimeout = "10000"; + static final String sleep = "2"; + + @BeforeClass + public static void beforeClass() throws Exception { + initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); + createIndex(); + } + + public static void createIndex() { + for (int i = 0; i < NUM_DOCS; i++) { + assertU( + adoc( + "id", + Integer.toString(i), + "name", + "a" + i + " b" + i + " c" + i + " d" + i + " e" + i)); + if (random().nextInt(NUM_DOCS) == 0) { + assertU(commit()); // sometimes make multiple segments + } + } + assertU(commit()); + } + + @Test + public void testPrefixQuery() throws Exception { + String q = "name:a*"; + assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); + + // do the same query and test for both success, and that the number of documents matched (i.e. + // make sure no partial results were cached) + assertJQ(req("q", q, "timeAllowed", longTimeout), assertionString); + + // this time we should get a query cache hit and hopefully no exception? this may change in the + // future if time checks are put into other places. + + // 2024-4-15: it did change..., and now this fails with 1 or 2 ms and passes with 3ms... I see + // no way this won't be terribly brittle. Maybe TestInjection of some sort to bring this back? + + // assertJQ(req("q", q, "timeAllowed", "2", "sleep", sleep), assertionString); + + // The idea that the request won't time out due to caching is a flawed test methodology, + // It relies on the test running quickly and not stalling. 
The above test should possibly + // be doing something along the lines of this (but we lack api for it) + // + // SolrCores solrCores = ExitableDirectoryReaderTest.h.getCoreContainer().solrCores; + // List cores = solrCores.getCores(); + // for (SolrCore core : cores) { + // if (<<< find the right core >>> ) { + // ((SolrCache)core.getSearcher().get().<<>> + // } + // } + + // now do the same for the filter cache + // 2024-4-15: this still passes probably because *:* is so fast, but it still worries me + assertJQ(req("q", "*:*", "fq", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); + + // make sure that the result succeeds this time, and that a bad filter wasn't cached + assertJQ(req("q", "*:*", "fq", q, "timeAllowed", longTimeout), assertionString); + + // test that Long.MAX_VALUE works + assertJQ(req("q", "name:b*", "timeAllowed", Long.toString(Long.MAX_VALUE)), assertionString); + + // negative timeAllowed should disable timeouts. + assertJQ(req("q", "name:c*", "timeAllowed", "-7"), assertionString); + } + + // There are lots of assumptions about how/when cache entries should be changed in this method. + // The simple case above shows the root problem without the confusion. testFilterSimpleCase should + // be removed once it is running and this test should be un-ignored and the assumptions verified. + // With all the weirdness, I'm not going to vouch for this test. Feel free to change it. 
+ @Test + public void testCacheAssumptions() throws Exception { + String fq = "name:d*"; + SolrCore core = h.getCore(); + MetricsMap filterCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.filterCache")) + .getGauge(); + long fqInserts = (long) filterCacheStats.getValue().get("inserts"); + + MetricsMap queryCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); + long qrInserts = (long) queryCacheStats.getValue().get("inserts"); + + // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed, and it gets 100 back and the + // for loop below succeeds. + String response = + JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep)); + Map res = (Map) fromJSONString(response); + Map body = (Map) (res.get("response")); + assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); + + Map header = (Map) (res.get("responseHeader")); + assertTrue( + "Should have partial results", + (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); + + assertEquals( + "Should NOT have inserted partial results in the cache!", + (long) queryCacheStats.getValue().get("inserts"), + qrInserts); + + assertEquals( + "Should NOT have another insert", + fqInserts, + (long) filterCacheStats.getValue().get("inserts")); + + // At the end of all this, we should have no hits in the queryResultCache. + response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout)); + + // Check that we did insert this one. 
+ assertEquals("Hits should still be 0", (long) filterCacheStats.getValue().get("hits"), 0L); + assertEquals( + "Inserts should be bumped", + (long) filterCacheStats.getValue().get("inserts"), + fqInserts + 1); + + res = (Map) fromJSONString(response); + body = (Map) (res.get("response")); + + assertEquals("Should have exactly " + NUM_DOCS, (long) (body.get("numFound")), NUM_DOCS); + header = (Map) (res.get("responseHeader")); + assertNull( + "Should NOT have partial results", + header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + } + + // When looking at a problem raised on the user's list I ran across this anomaly with timeAllowed + // This tests for the second query NOT returning partial results, along with some other + @Test + public void testQueryResults() throws Exception { + String q = "name:e*"; + SolrCore core = h.getCore(); + MetricsMap queryCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); + Map nl = queryCacheStats.getValue(); + long inserts = (long) nl.get("inserts"); + + String response = JQ(req("q", q, "indent", "true", "timeAllowed", "1", "sleep", sleep)); + + // The queryResultCache should NOT get an entry here. + nl = queryCacheStats.getValue(); + assertEquals("Should NOT have inserted partial results!", inserts, (long) nl.get("inserts")); + + Map res = (Map) fromJSONString(response); + Map body = (Map) (res.get("response")); + Map header = (Map) (res.get("responseHeader")); + + assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); + assertTrue( + "Should have partial results", + (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); + + response = JQ(req("q", q, "indent", "true", "timeAllowed", longTimeout)); + + // Check that we did insert this one. 
+ Map nl2 = queryCacheStats.getValue(); + assertEquals("Hits should still be 0", (long) nl.get("hits"), (long) nl2.get("hits")); + assertTrue("Inserts should be bumped", inserts < (long) nl2.get("inserts")); + + res = (Map) fromJSONString(response); + body = (Map) (res.get("response")); + header = (Map) (res.get("responseHeader")); + + assertEquals("Should have exactly " + NUM_DOCS, NUM_DOCS, (long) (body.get("numFound"))); + Boolean test = (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + if (test != null) { + assertFalse("Should NOT have partial results", test); + } + } +} From ddc6c164b0f39b020ad04082045bf0987298433b Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Wed, 30 Jul 2025 14:13:24 +0200 Subject: [PATCH 3/7] Tidy up. --- .../apache/solr/search/SolrIndexSearcher.java | 5 +- .../core/ExitableDirectoryReaderTest.java | 64 +++++++++---------- .../org/apache/solr/core/TimeAllowedTest.java | 4 +- .../solr/search/CallerSpecificQueryLimit.java | 62 ++++++++++-------- .../apache/solr/search/TestQueryLimits.java | 3 +- 5 files changed, 74 insertions(+), 64 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 8814b4aba247..8519f8ba9180 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -139,7 +139,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI public static final String STATS_SOURCE = "org.apache.solr.stats_source"; public static final String STATISTICS_KEY = "searcher"; -public static final String EXITABLE_READER_PROPERTY = "solr.useExitableDirectoryReader"; + public static final String EXITABLE_READER_PROPERTY = "solr.useExitableDirectoryReader"; // These should *only* be used for debugging or monitoring purposes public static final AtomicLong numOpens = new AtomicLong(); @@ 
-229,7 +229,8 @@ private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) throws IOException { assert reader != null; reader = UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()); - if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY)) { // SOLR-16693 legacy; may be removed. Probably inefficient. + // see SOLR-16693 and SOLR-17831 for more details + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY)) { reader = ExitableDirectoryReader.wrap(reader, QueryLimitsTimeout.INSTANCE); } return reader; diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java index 93112f1959e0..b7a5f3b4ef6d 100644 --- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.core; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.search.CallerSpecificQueryLimit; import org.apache.solr.search.SolrIndexSearcher; @@ -23,8 +24,6 @@ import org.junit.After; import org.junit.Test; -import java.util.Map; - public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 { static final int NUM_DOCS = 100; static final String assertionString = "/response/numFound==" + NUM_DOCS; @@ -53,37 +52,18 @@ public void tearDown() throws Exception { @Test public void testWithExitableDirectoryReader() throws Exception { - System.setProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY, "true"); - initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); - createIndex(); - - // create a limit that will not trip but will report calls to shouldExit() - // NOTE: we need to use the inner class name to capture the calls - String callerExpr = "ExitableTermsEnum:-1"; - CallerSpecificQueryLimit queryLimit = new CallerSpecificQueryLimit(callerExpr); - TestInjection.queryTimeout = 
queryLimit; - String q = "name:a*"; - assertJQ(req("q", q), assertionString); - Map callCounts = queryLimit.getCallCounts(); - assertFalse("there should be some calls from ExitableTermsEnum", callCounts.isEmpty()); - assertTrue("there should be some calls from ExitableTermsEnum: " + callCounts, callCounts.get(callerExpr) > 0); - - // check that the limits are tripped in ExitableDirectoryReader - int maxCount = random().nextInt(10) + 1; - callerExpr = "ExitableTermsEnum:" + maxCount; - queryLimit = new CallerSpecificQueryLimit(callerExpr); - TestInjection.queryTimeout = queryLimit; - // avoid using the cache - q = "name:b*"; - assertJQ(req("q", q), failureAssertionString); - callCounts = queryLimit.getCallCounts(); - assertFalse("there should be some calls from ExitableTermsEnum", callCounts.isEmpty()); - assertTrue("there should be at least " + maxCount + " calls from ExitableTermsEnum: " + callCounts, callCounts.get(callerExpr) >= maxCount); + doTestWithExitableDirectoryReader(true); } @Test public void testWithoutExitableDirectoryReader() throws Exception { - System.setProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY, "false"); + doTestWithExitableDirectoryReader(false); + } + + private void doTestWithExitableDirectoryReader(boolean withExitableDirectoryReader) + throws Exception { + System.setProperty( + SolrIndexSearcher.EXITABLE_READER_PROPERTY, String.valueOf(withExitableDirectoryReader)); initCore("solrconfig-delaying-component.xml", "schema_latest.xml"); createIndex(); @@ -95,17 +75,35 @@ public void testWithoutExitableDirectoryReader() throws Exception { String q = "name:a*"; assertJQ(req("q", q), assertionString); Map callCounts = queryLimit.getCallCounts(); - assertEquals("there should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); + if (withExitableDirectoryReader) { + assertTrue( + "there should be some calls from ExitableTermsEnum: " + callCounts, + callCounts.get(callerExpr) > 0); + } else { + assertEquals( + "there 
should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); + } - // check that the limits are not tripped in ExitableDirectoryReader, because it is not used + // check that the limits are tripped in ExitableDirectoryReader if it's in use int maxCount = random().nextInt(10) + 1; callerExpr = "ExitableTermsEnum:" + maxCount; queryLimit = new CallerSpecificQueryLimit(callerExpr); TestInjection.queryTimeout = queryLimit; // avoid using the cache q = "name:b*"; - assertJQ(req("q", q), assertionString); + if (withExitableDirectoryReader) { + assertJQ(req("q", q), failureAssertionString); + } else { + assertJQ(req("q", q), assertionString); + } callCounts = queryLimit.getCallCounts(); - assertEquals("there should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); + if (withExitableDirectoryReader) { + assertTrue( + "there should be at least " + maxCount + " calls from ExitableTermsEnum: " + callCounts, + callCounts.get(callerExpr) >= maxCount); + } else { + assertEquals( + "there should be no calls from ExitableTermsEnum", 0, (int) callCounts.get(callerExpr)); + } } } diff --git a/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java index 22dc5571bc74..32521802d136 100644 --- a/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java +++ b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java @@ -27,8 +27,8 @@ import org.junit.Test; /** - * Test that checks that long-running queries are exited by Solr using the {@link org.apache.solr.search.QueryLimits} - * implementation. + * Test that checks that long-running queries are exited by Solr using the {@link + * org.apache.solr.search.QueryLimits} implementation. 
*/ public class TimeAllowedTest extends SolrTestCaseJ4 { diff --git a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java index 46c3089a7751..92313088c889 100644 --- a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java +++ b/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java @@ -17,7 +17,6 @@ package org.apache.solr.search; import java.lang.invoke.MethodHandles; -import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; @@ -26,20 +25,22 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Helper class to simulate query timeouts at specific points in various components that call {@link * QueryLimits#shouldExit()}. These calling points are identified by the calling class' simple name - * and optionally a method name and the optional maximum count, e.g. MoreLikeThisComponent or + * and optionally a method name and the optional maximum count, e.g. MoreLikeThisComponent + * or * ClusteringComponent.finishStage, ClusteringComponent.finishStage:100. + * *

NOTE: implementation details cause the expression simpleName to be disabled when - * also any simpleName.anyMethod[:NNN] expression is used for the same class name.

- *

NOTE 2: when maximum count is a negative number e.g. simpleName.someMethod:-1 then - * only the number of calls to {@link QueryLimits#shouldExit()} for that expression will be reported but - * no limit will be enforced.

+ * also any simpleName.anyMethod[:NNN] expression is used for the same class name. + * + *

NOTE 2: when maximum count is a negative number e.g. simpleName.someMethod:-1 + * then only the number of calls to {@link QueryLimits#shouldExit()} for that expression will be + * reported but no limit will be enforced. */ public class CallerSpecificQueryLimit implements QueryLimit { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -57,9 +58,9 @@ public class CallerSpecificQueryLimit implements QueryLimit { /** * Signal a timeout in places that match the calling classes (and methods). * - * @param callerExprs list of expressions in the format of simpleClassName[.methodName][:NNN]. - * If the list is empty or null then the first call to {@link #shouldExit()} from any - * caller will match. + * @param callerExprs list of expressions in the format of + * simpleClassName[.methodName][:NNN]. If the list is empty or null then the first call + * to {@link #shouldExit()} from any caller will match. */ public CallerSpecificQueryLimit(String... callerExprs) { if (callerExprs != null && callerExprs.length > 0) { @@ -90,22 +91,22 @@ public CallerSpecificQueryLimit(String... callerExprs) { } } - /** - * Returns the set of caller expressions that were tripped. - */ + /** Returns the set of caller expressions that were tripped. */ public Set getTrippedBy() { return trippedBy; } - /** - * Returns a map of tripped caller expressions to their current call counts. - */ + /** Returns a map of tripped caller expressions to their current call counts. */ public Map getCallCounts() { return callCounts.entrySet().stream() - .collect(Collectors.toMap( - e -> e.getKey() + - (maxCounts.containsKey(e.getKey()) ? ":" + maxCounts.get(e.getKey()) : ""), - e -> e.getValue().get())); + .collect( + Collectors.toMap( + e -> + e.getKey() + + (maxCounts.containsKey(e.getKey()) + ? 
":" + maxCounts.get(e.getKey()) + : ""), + e -> e.getValue().get())); } @Override @@ -128,7 +129,9 @@ public boolean shouldExit() { log.info("++++ Limit tripped by any first caller: {} ++++", expr); } trippedBy.add(expr); - callCounts.computeIfAbsent(expr, k -> new AtomicInteger()).incrementAndGet(); + callCounts + .computeIfAbsent(expr, k -> new AtomicInteger()) + .incrementAndGet(); return true; } Set methods = interestingCallers.get(declaring.getSimpleName()); @@ -137,16 +140,25 @@ public boolean shouldExit() { return false; } // MATCH. Class name was specified, possibly with methods. - // If methods is empty then all methods match, otherwise only the specified methods match. + // If methods is empty then all methods match, otherwise only the + // specified methods match. if (methods.isEmpty() || methods.contains(method)) { String expr = declaring.getSimpleName(); if (methods.contains(method)) { expr = expr + "." + method; } else { - // even though we don't match/enforce at the method level, still record the method counts to give better insight into the callers - callCounts.computeIfAbsent(declaring.getSimpleName() + "." + method, k -> new AtomicInteger(0)).incrementAndGet(); + // even though we don't match/enforce at the method level, still + // record the method counts to give better insight into the callers + callCounts + .computeIfAbsent( + declaring.getSimpleName() + "." 
+ method, + k -> new AtomicInteger(0)) + .incrementAndGet(); } - int currentCount = callCounts.computeIfAbsent(expr, k -> new AtomicInteger(0)).incrementAndGet(); + int currentCount = + callCounts + .computeIfAbsent(expr, k -> new AtomicInteger(0)) + .incrementAndGet(); // check if we have a max count for this expression if (maxCounts.containsKey(expr)) { int maxCount = maxCounts.getOrDefault(expr, 0); diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java index edb9a5da5956..01d7447c181e 100644 --- a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java +++ b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import java.util.Map; import org.apache.lucene.tests.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -26,8 +27,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Map; - public class TestQueryLimits extends SolrCloudTestCase { private static final String COLLECTION = "test"; From 3191eaaedccaf80cf4f10cec168dfca6b18ef7dd Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Wed, 30 Jul 2025 17:17:51 +0200 Subject: [PATCH 4/7] Unnecessary name conflict. --- solr/core/src/java/org/apache/solr/search/QueryLimits.java | 4 ++-- .../org/apache/solr/core/ExitableDirectoryReaderTest.java | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/search/QueryLimits.java b/solr/core/src/java/org/apache/solr/search/QueryLimits.java index e6e0db5eed94..60d27f061283 100644 --- a/solr/core/src/java/org/apache/solr/search/QueryLimits.java +++ b/solr/core/src/java/org/apache/solr/search/QueryLimits.java @@ -37,12 +37,12 @@ * or other resource limits. 
Exceeding any specified limit will cause {@link #shouldExit()} to * return true the next time it is checked (it may be checked in either Lucene code or Solr code) */ -public class QueryLimits implements QueryTimeout { +public final class QueryLimits implements QueryTimeout { public static final String UNLIMITED = "This request is unlimited."; private final List limits = new ArrayList<>(3); // timeAllowed, cpu, and memory anticipated - public static QueryLimits NONE = new QueryLimits(); + public static final QueryLimits NONE = new QueryLimits(); private final SolrQueryResponse rsp; private final boolean allowPartialResults; diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java index b7a5f3b4ef6d..86e4dcc51a0f 100644 --- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java @@ -45,9 +45,8 @@ public static void createIndex() { } @After - public void tearDown() throws Exception { + public void tearDownCore() { deleteCore(); - super.tearDown(); } @Test From c4f322305f6a613582796105e4d8c76b5ff68ae8 Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Thu, 31 Jul 2025 13:23:28 +0200 Subject: [PATCH 5/7] Move the helper class to test-framework. Fix NPE. 
--- .../src/java/org/apache/solr/search/SolrIndexSearcher.java | 4 ++-- .../org/apache/solr/search/CallerSpecificQueryLimit.java | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename solr/{core/src/test => test-framework/src/java}/org/apache/solr/search/CallerSpecificQueryLimit.java (100%) diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 8519f8ba9180..8d8bce0e9af1 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -230,7 +230,7 @@ private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader) assert reader != null; reader = UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()); // see SOLR-16693 and SOLR-17831 for more details - if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY)) { + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE)) { reader = ExitableDirectoryReader.wrap(reader, QueryLimitsTimeout.INSTANCE); } return reader; @@ -792,7 +792,7 @@ public QueryResult search(QueryResult qr, QueryCommand cmd) throws IOException { protected void search(List leaves, Weight weight, Collector collector) throws IOException { QueryLimits queryLimits = QueryLimits.getCurrentLimits(); - if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY) || !queryLimits.isLimitsEnabled()) { + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE) || !queryLimits.isLimitsEnabled()) { // no timeout. 
Pass through to super class super.search(leaves, weight, collector); } else { diff --git a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java similarity index 100% rename from solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java rename to solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java From 40cfff68e5f14d8851d6ba43ac5c8d518705ccad Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Thu, 31 Jul 2025 14:35:58 +0200 Subject: [PATCH 6/7] Add unit test for CallerSpecificQueryLimit. Tidy up. --- .../apache/solr/search/SolrIndexSearcher.java | 3 +- .../solr/search/CallerSpecificQueryLimit.java | 51 +++--- .../search/CallerSpecificQueryLimitTest.java | 151 ++++++++++++++++++ 3 files changed, 181 insertions(+), 24 deletions(-) create mode 100644 solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 8d8bce0e9af1..2e1c31691cae 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -792,7 +792,8 @@ public QueryResult search(QueryResult qr, QueryCommand cmd) throws IOException { protected void search(List leaves, Weight weight, Collector collector) throws IOException { QueryLimits queryLimits = QueryLimits.getCurrentLimits(); - if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE) || !queryLimits.isLimitsEnabled()) { + if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE) + || !queryLimits.isLimitsEnabled()) { // no timeout. 
Pass through to super class super.search(leaves, weight, collector); } else { diff --git a/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java index 92313088c889..a979e1eff7dd 100644 --- a/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java +++ b/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java @@ -17,9 +17,12 @@ package org.apache.solr.search; import java.lang.invoke.MethodHandles; +import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -63,29 +66,31 @@ public class CallerSpecificQueryLimit implements QueryLimit { * to {@link #shouldExit()} from any caller will match. */ public CallerSpecificQueryLimit(String... callerExprs) { - if (callerExprs != null && callerExprs.length > 0) { - for (String callerExpr : callerExprs) { - String[] exprCount = callerExpr.split(":"); - if (exprCount.length > 2) { - throw new RuntimeException("Invalid count in callerExpr: " + callerExpr); - } - String[] clazzMethod = exprCount[0].split("\\."); - if (clazzMethod.length > 2) { - throw new RuntimeException("Invalid method in callerExpr: " + callerExpr); - } - Set methods = - interestingCallers.computeIfAbsent(clazzMethod[0], c -> new HashSet<>()); - if (clazzMethod.length > 1) { - methods.add(clazzMethod[1]); - } - if (exprCount.length > 1) { - try { - int count = Integer.parseInt(exprCount[1]); - maxCounts.put(exprCount[0], count); - callCounts.put(exprCount[0], new AtomicInteger(0)); - } catch (NumberFormatException e) { - throw new RuntimeException("Invalid count in callerExpr: " + callerExpr, e); - } + this(callerExprs != null ? 
Arrays.asList(callerExprs) : List.of()); + } + + public CallerSpecificQueryLimit(Collection callerExprs) { + for (String callerExpr : callerExprs) { + String[] exprCount = callerExpr.split(":"); + if (exprCount.length > 2) { + throw new RuntimeException("Invalid count in callerExpr: " + callerExpr); + } + String[] clazzMethod = exprCount[0].split("\\."); + if (clazzMethod.length > 2) { + throw new RuntimeException("Invalid method in callerExpr: " + callerExpr); + } + Set methods = + interestingCallers.computeIfAbsent(clazzMethod[0], c -> new HashSet<>()); + if (clazzMethod.length > 1) { + methods.add(clazzMethod[1]); + } + if (exprCount.length > 1) { + try { + int count = Integer.parseInt(exprCount[1]); + maxCounts.put(exprCount[0], count); + callCounts.put(exprCount[0], new AtomicInteger(0)); + } catch (NumberFormatException e) { + throw new RuntimeException("Invalid count in callerExpr: " + callerExpr, e); } } } diff --git a/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java b/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java new file mode 100644 index 000000000000..da8f58c73cf4 --- /dev/null +++ b/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.search; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.solr.SolrTestCaseJ4; +import org.junit.Test; + +public class CallerSpecificQueryLimitTest extends SolrTestCaseJ4 { + + private static class LimitedWorker { + private final CallerSpecificQueryLimit limit; + + static class NestedLimitedWorker { + private final CallerSpecificQueryLimit limit; + + NestedLimitedWorker(CallerSpecificQueryLimit limit) { + this.limit = limit; + } + + public void doWork() { + limit.shouldExit(); + } + } + + private NestedLimitedWorker nestedLimitedWorker; + + LimitedWorker(CallerSpecificQueryLimit limit) { + this.limit = limit; + nestedLimitedWorker = new NestedLimitedWorker(limit); + } + + public void doWork() { + nestedLimitedWorker.doWork(); + limit.shouldExit(); + } + } + + private static class LimitedWorker2 { + private final CallerSpecificQueryLimit limit; + + LimitedWorker2(CallerSpecificQueryLimit limit) { + this.limit = limit; + } + + public void doWork() { + limit.shouldExit(); + } + } + + private static final boolean[] values = {true, false}; + + @Test + public void testLimits() { + for (boolean withNested : values) { + for (boolean withMethod : values) { + for (boolean withCount : values) { + for (boolean shouldTrip : values) { + doTestWithLimit(withNested, withMethod, withCount, shouldTrip); + } + } + } + } + } + + private void doTestWithLimit( + boolean withNested, boolean withMethod, boolean withCount, boolean shouldTrip) { + List nonMatchingCallerExprs = new ArrayList<>(); + List matchingCallCounts = new ArrayList<>(); + String matchingClassName = + withNested + ? 
LimitedWorker.NestedLimitedWorker.class.getSimpleName() + : LimitedWorker.class.getSimpleName(); + String callerExpr = matchingClassName; + if (withMethod) { + callerExpr += ".doWork"; + } + int count = 1; + if (withCount) { + count = random().nextInt(9) + 1; + if (shouldTrip) { + callerExpr += ":" + count; + } else { + callerExpr += ":-" + count; + } + } else if (!shouldTrip) { + callerExpr += ":-1"; // no limit, should not trip + } + nonMatchingCallerExprs.add(LimitedWorker2.class.getSimpleName()); + nonMatchingCallerExprs.add(LimitedWorker2.class.getSimpleName() + ".doWork"); + if (withNested) { + nonMatchingCallerExprs.add(LimitedWorker.class.getSimpleName()); + } else { + nonMatchingCallerExprs.add(LimitedWorker.NestedLimitedWorker.class.getSimpleName()); + } + matchingCallCounts.add(callerExpr); + if (!withMethod) { + matchingCallCounts.add(matchingClassName + ".doWork"); + } + + CallerSpecificQueryLimit limit = new CallerSpecificQueryLimit(callerExpr); + LimitedWorker limitedWorker = new LimitedWorker(limit); + LimitedWorker2 limitedWorker2 = new LimitedWorker2(limit); + for (int i = 0; i < count * 2; i++) { + limitedWorker2.doWork(); + limitedWorker.doWork(); + } + Set trippedBy = limit.getTrippedBy(); + if (shouldTrip) { + assertFalse("Limit should have been tripped, callerExpr: " + callerExpr, trippedBy.isEmpty()); + for (String nonMatchingCallerExpr : nonMatchingCallerExprs) { + assertFalse( + "Limit should not have been tripped by " + + nonMatchingCallerExpr + + " but was: " + + trippedBy, + trippedBy.contains(nonMatchingCallerExpr)); + } + } else { + assertTrue( + "Limit should not have been tripped, callerExpr: " + + callerExpr + + ", trippedBy: " + + trippedBy, + trippedBy.isEmpty()); + } + Map callCounts = limit.getCallCounts(); + for (String matchingCallCount : matchingCallCounts) { + assertTrue( + "Call count for " + matchingCallCount + " should be > 0, callCounts: " + callCounts, + callCounts.getOrDefault(matchingCallCount, 0) > 0); + } + } +} 
From db80b2a10f2a381947cf16d821387ae9769bdc7f Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Fri, 1 Aug 2025 13:04:45 +0200 Subject: [PATCH 7/7] Add CHANGES.txt entry. --- solr/CHANGES.txt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index ab044e2f409d..6a79180bbf40 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -226,6 +226,15 @@ Other Changes --------------------- (No changes) +================== 9.9.1 ================== +Bug Fixes +--------------------- +* SOLR-17831: ExitableDirectoryReader always initialized with QueryLimits.NONE (Andrzej Białecki) + +Dependency Upgrades +--------------------- +(No changes) + ================== 9.9.0 ================== New Features ---------------------