Skip to content

Commit

Permalink
Add regression test case.
Browse files Browse the repository at this point in the history
  • Loading branch information
gianm committed Nov 1, 2023
1 parent b178137 commit 439b7bc
Showing 1 changed file with 83 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.scan.ScanQuery;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.topn.TopNQuery;
import org.apache.druid.query.topn.TopNQueryBuilder;
Expand All @@ -81,6 +82,7 @@
import org.apache.druid.segment.join.JoinableFactory;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.join.MapJoinableFactory;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.server.scheduling.ManualQueryPrioritizationStrategy;
import org.apache.druid.server.scheduling.NoQueryLaningStrategy;
Expand Down Expand Up @@ -215,7 +217,7 @@ public class ClientQuerySegmentWalkerTest
private QueryRunnerFactoryConglomerate conglomerate;

// Queries that are issued; checked by "testQuery" against its "expectedQueries" parameter.
private List<ExpectedQuery> issuedQueries = new ArrayList<>();
private final List<ExpectedQuery> issuedQueries = new ArrayList<>();

// A ClientQuerySegmentWalker that has two segments: one for FOO and one for BAR; each with interval INTERVAL,
// version VERSION, and shard spec SHARD_SPEC.
Expand Down Expand Up @@ -717,7 +719,6 @@ public void testTopNScanMultiValue()

testQuery(
query,
// GroupBy handles its own subqueries; only the inner one will go to the cluster.
ImmutableList.of(
ExpectedQuery.cluster(subquery.withId(DUMMY_QUERY_ID).withSubQueryId("1.1")),
ExpectedQuery.local(
Expand Down Expand Up @@ -805,6 +806,73 @@ public void testTimeseriesOnGroupByOnTableErrorTooManyRows()
testQuery(query, ImmutableList.of(), ImmutableList.of());
}

@Test // Regression test for bug fixed in https://github.com/apache/druid/pull/15300
public void testScanOnScanWithStringExpression()
{
// Walker limits: MAX_SUBQUERY_ROWS = 1 (too small for the 4-row subquery) and
// MAX_SUBQUERY_BYTES = 1000, so materialization falls through to the byte-limited
// (frame-based) inlining path — the code path the linked PR fixed.
initWalker(
ImmutableMap.of(QueryContexts.MAX_SUBQUERY_ROWS_KEY, "1", QueryContexts.MAX_SUBQUERY_BYTES_KEY, "1000"),
scheduler
);

// Inner scan: reads only the string column "s" from datasource FOO.
final Query<?> subquery =
Druids.newScanQueryBuilder()
.dataSource(FOO)
.intervals(new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY))
.columns("s")
.legacy(false)
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
.build()
.withId(DUMMY_QUERY_ID);

// Outer scan: wraps the inner scan as a QueryDataSource and computes a LONG-typed
// virtual column "v" from a string expression over "s" (2 when s == 'x', else 3).
final Query<?> query =
Druids.newScanQueryBuilder()
.dataSource(new QueryDataSource(subquery))
.intervals(new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY))
.virtualColumns(
new ExpressionVirtualColumn(
"v",
"case_searched(s == 'x',2,3)",
ColumnType.LONG,
ExprMacroTable.nil()
)
)
.columns("v")
.legacy(false)
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
.build()
.withId(DUMMY_QUERY_ID);

testQuery(
query,
ImmutableList.of(
// Expect the inner scan to be issued to the cluster...
ExpectedQuery.cluster(subquery.withId(DUMMY_QUERY_ID).withSubQueryId("1.1")),
// ...and the outer scan to run locally against the inlined subquery results.
// NOTE(review): "s" is added to the signature with a null column type —
// presumably the regression involved a typeless inline signature; confirm
// against PR #15300.
ExpectedQuery.local(
query.withDataSource(
InlineDataSource.fromIterable(
ImmutableList.of(
new Object[]{"x"},
new Object[]{"x"},
new Object[]{"y"},
new Object[]{"z"}
),
RowSignature.builder().add("s", null).build()
)
)
)
),
// Expected final results: the expression maps x,x,y,z -> 2,2,3,3.
ImmutableList.of(
new Object[]{2L},
new Object[]{2L},
new Object[]{3L},
new Object[]{3L}
)
);

// Scheduler accounting: two queries ran in total, only one (the cluster query) was
// prioritized/laned, and both acquired slots were released.
Assert.assertEquals(2, scheduler.getTotalRun().get());
Assert.assertEquals(1, scheduler.getTotalPrioritizedAndLaned().get());
Assert.assertEquals(2, scheduler.getTotalAcquired().get());
Assert.assertEquals(2, scheduler.getTotalReleased().get());
}

@Test
public void testTimeseriesOnGroupByOnTableErrorTooLarge()
Expand Down Expand Up @@ -1500,7 +1568,19 @@ private static class ExpectedQuery
);

if (modifiedQuery.getDataSource() instanceof FrameBasedInlineDataSource) {
// Do this recursively for if the query's datasource is a query datasource
// Do round-trip serialization in order to replace FrameBasedInlineDataSource with InlineDataSource, so
// comparisons work independently of whether we are using frames or regular inline datasets.
try {
modifiedQuery = modifiedQuery.withDataSource(
TestHelper.JSON_MAPPER.readValue(
TestHelper.JSON_MAPPER.writeValueAsBytes(modifiedQuery.getDataSource()),
DataSource.class
)
);
}
catch (IOException e) {
throw new RuntimeException(e);
}
}

this.query = modifiedQuery;
Expand Down

0 comments on commit 439b7bc

Please sign in to comment.