Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

sql support for dynamic parameters #6974

Merged
merged 45 commits into from
Feb 19, 2020
Merged
Show file tree
Hide file tree
Changes from 34 commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
0cc2c66
sql support for dynamic parameters
clintropolis Jan 31, 2019
3c98981
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 3, 2019
f668777
fixup
clintropolis Feb 3, 2019
bcce2be
javadocs
clintropolis Feb 4, 2019
33c1894
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 15, 2019
568364d
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 27, 2019
be84ac8
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Mar 11, 2019
3bedbd9
fixup from merge
clintropolis Mar 11, 2019
17d32aa
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Mar 19, 2019
a1038bc
formatting
clintropolis Mar 19, 2019
37668fc
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Apr 5, 2019
62ac051
fixes
clintropolis Apr 5, 2019
ed23a8c
fix it
clintropolis Apr 5, 2019
c6306ef
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Apr 22, 2019
88f6084
doc fix
clintropolis Apr 22, 2019
2c00301
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis May 10, 2019
57b1342
remove druid fallback self-join parameterized test
clintropolis May 10, 2019
c7968d8
unused imports
clintropolis May 10, 2019
27cebc6
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis May 31, 2019
01fcf4e
ignore test for now
clintropolis May 31, 2019
a7075be
fix imports
clintropolis May 31, 2019
3c50d9b
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Jun 24, 2019
5ea94c4
fixup
clintropolis Jun 25, 2019
7b554c0
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Jul 9, 2019
ed4a40f
fix merge
clintropolis Jul 9, 2019
e018a55
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Jul 12, 2019
6b4175f
merge fixup
clintropolis Jul 12, 2019
53b155f
fix test that cannot vectorize
clintropolis Jul 13, 2019
059320f
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Jan 23, 2020
9ff9667
fixup and more better
clintropolis Jan 23, 2020
97ea9e5
dependency thingo
clintropolis Jan 24, 2020
ee5b46d
fix docs
clintropolis Jan 24, 2020
01cbeae
tweaks
clintropolis Jan 24, 2020
6e670d2
fix docs
clintropolis Jan 24, 2020
c773e21
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Jan 29, 2020
aacae42
spelling
clintropolis Jan 29, 2020
d069f83
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 5, 2020
682833c
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 5, 2020
8987d1b
unused imports after merge
clintropolis Feb 6, 2020
2819431
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 16, 2020
e1dceef
Merge remote-tracking branch 'upstream/master' into sql-parameters
clintropolis Feb 17, 2020
8bdc691
review stuffs
clintropolis Feb 18, 2020
d967e12
add comment
clintropolis Feb 18, 2020
8f669ee
add ignore text
clintropolis Feb 18, 2020
6279270
review stuffs
clintropolis Feb 19, 2020
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ public void querySql(Blackhole blackhole) throws Exception
final Map<String, Object> context = ImmutableMap.of("vectorize", vectorize);
final AuthenticationResult authenticationResult = NoopEscalator.getInstance()
.createEscalatedAuthenticationResult();
try (final DruidPlanner planner = plannerFactory.createPlanner(context, authenticationResult)) {
try (final DruidPlanner planner = plannerFactory.createPlanner(context, ImmutableList.of(), authenticationResult)) {
final PlannerResult plannerResult = planner.plan(QUERIES.get(Integer.parseInt(query)));
final Sequence<Object[]> resultSequence = plannerResult.run();
final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.apache.druid.benchmark.query;

import com.google.common.collect.ImmutableList;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.BenchmarkSchemas;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
Expand Down Expand Up @@ -167,9 +168,9 @@ public void queryNative(Blackhole blackhole)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void queryPlanner(Blackhole blackhole) throws Exception
{
final AuthenticationResult authenticationResult = NoopEscalator.getInstance()
.createEscalatedAuthenticationResult();
try (final DruidPlanner planner = plannerFactory.createPlanner(null, authenticationResult)) {
final AuthenticationResult authResult = NoopEscalator.getInstance()
.createEscalatedAuthenticationResult();
try (final DruidPlanner planner = plannerFactory.createPlanner(null, ImmutableList.of(), authResult)) {
final PlannerResult plannerResult = planner.plan(sqlQuery);
final Sequence<Object[]> resultSequence = plannerResult.run();
final Object[] lastRow = resultSequence.accumulate(null, (accumulated, in) -> in);
Expand Down
52 changes: 48 additions & 4 deletions docs/querying/sql.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,9 @@ like `100` (denoting an integer), `100.0` (denoting a floating point value), or
timestamps can be written like `TIMESTAMP '2000-01-01 00:00:00'`. Literal intervals, used for time arithmetic, can be
written like `INTERVAL '1' HOUR`, `INTERVAL '1 02:03' DAY TO MINUTE`, `INTERVAL '1-2' YEAR TO MONTH`, and so on.

Druid SQL supports dynamic parameters in question mark (`?`) syntax, where parameters are bound to the `?` placeholders
at execution time. To use dynamic parameters, replace any literal in the query with a `?` character and ensure that
corresponding parameter values are provided at execution time. Parameters are bound to the placeholders in the order in
which they are passed.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggestion: This description feels like it could be expanded upon just a bit, just to draw out what is meant by "in order," for one thing. How about something like:

"Druid SQL supports dynamic parameters in question mark (?) syntax, where parameters are bound to the ? placeholders at execution time. To use dynamic parameters, replace any  literal in the query with a ? character and ensure that corresponding parameter values are provided at execution time. Parameters are bound to the placeholders in the order in which they are passed."

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sgtm, changed


Druid SQL supports SELECT queries with the following structure:

```
Expand Down Expand Up @@ -516,6 +519,17 @@ of configuration.
You can make Druid SQL queries using JSON over HTTP by posting to the endpoint `/druid/v2/sql/`. The request should
be a JSON object with a "query" field, like `{"query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar'"}`.

##### Request

|Property|Type|Description|Required|
|--------|----|-----------|--------|
|`query`|`String`| SQL query to run| yes |
|`resultFormat`|`String` (`ResultFormat`)| Result format for output | no (default `"object"`)|
|`header`|`Boolean`| Write column name header for supporting formats| no (default `false`)|
|`context`|`Object`| Connection context map. see [connection context parameters](#connection-context)| no |
|`parameters`|`SqlParameter` list| List of query parameters for parameterized queries. | no |


You can use _curl_ to send SQL queries from the command-line:

```bash
Expand All @@ -538,7 +552,27 @@ like:
}
```

Metadata is available over the HTTP API by querying [system tables](#metadata-tables).
Parameterized SQL queries are also supported:

```json
{
"query" : "SELECT COUNT(*) FROM data_source WHERE foo = ? AND __time > ?",
"parameters": [
{ "type": "VARCHAR", "value": "bar"},
{ "type": "TIMESTAMP", "value": "2000-01-01 00:00:00" }
]
}
```

##### SqlParameter

|Property|Type|Description|Required|
|--------|----|-----------|--------|
|`type`|`String` (`SqlType`) | String value of `SqlType` of parameter. [`SqlType`](https://calcite.apache.org/avatica/apidocs/org/apache/calcite/avatica/SqlType.html) is a friendly wrapper around [`java.sql.Types`](https://docs.oracle.com/javase/8/docs/api/java/sql/Types.html?is-external=true)|yes|

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: "...is a friendly wrapper..."

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍

|`value`|`Object`| Value of the parameter|yes|


Metadata is also available over the HTTP API by querying [system tables](#metadata-tables).

#### Responses

Expand Down Expand Up @@ -605,7 +639,7 @@ Properties connectionProperties = new Properties();
try (Connection connection = DriverManager.getConnection(url, connectionProperties)) {
try (
final Statement statement = connection.createStatement();
final ResultSet resultSet = statement.executeQuery(query)
final ResultSet resultSet = statement.executeQuery(query);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think the original example is better as the semicolon is not needed

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

removed

) {
while (resultSet.next()) {
// Do something
Expand All @@ -615,8 +649,7 @@ try (Connection connection = DriverManager.getConnection(url, connectionProperti
```

Table metadata is available over JDBC using `connection.getMetaData()` or by querying the
["INFORMATION_SCHEMA" tables](#metadata-tables). Parameterized queries (using `?` or other placeholders) don't work properly,
so avoid those.
["INFORMATION_SCHEMA" tables](#metadata-tables).

#### Connection stickiness

Expand All @@ -628,6 +661,17 @@ the necessary stickiness even with a normal non-sticky load balancer. Please see

Note that the non-JDBC [JSON over HTTP](#json-over-http) API is stateless and does not require stickiness.

### Dynamic Parameters

You can use parameterized queries in JDBC code, as in this example:

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I misread this at first as meaning JDBC enables parameterized queries somehow. Is this wording clearer:
"You can use parameterized queries in JDBC code, as in this example;"

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

changed


```java
PreparedStatement statement = connection.prepareStatement("SELECT COUNT(*) AS cnt FROM druid.foo WHERE dim1 = ? OR dim1 = ?");
statement.setString(1, "abc");
statement.setString(2, "def");
final ResultSet resultSet = statement.executeQuery();
```

### Connection context

Druid SQL supports setting connection parameters on the client. The parameters in the table below affect SQL planning.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,12 @@ public void testComputingSketchOnNumericValues() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<String[]> expectedResults = ImmutableList.of(
new String[] {
"\"AAAAAT/wAAAAAAAAQBgAAAAAAABAaQAAAAAAAAAAAAY/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAABAAAAAAAAAAD/wAAAAAAAAQAgAAAAAAAA/8AAAAAAAAEAQAAAAAAAAP/AAAAAAAABAFAAAAAAAAD/wAAAAAAAAQBgAAAAAAAA=\""
Expand Down Expand Up @@ -220,7 +225,12 @@ public void testDefaultCompressionForTDigestGenerateSketchAgg() throws Exception
+ "FROM foo";

// Log query
sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();

// Verify query
Assert.assertEquals(
Expand Down Expand Up @@ -249,7 +259,12 @@ public void testComputingQuantileOnPreAggregatedSketch() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<double[]> expectedResults = ImmutableList.of(
new double[] {
1.1,
Expand Down Expand Up @@ -298,7 +313,12 @@ public void testGeneratingSketchAndComputingQuantileOnFly() throws Exception
+ "FROM (SELECT dim1, TDIGEST_GENERATE_SKETCH(m1, 200) AS x FROM foo group by dim1)";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<double[]> expectedResults = ImmutableList.of(
new double[] {
1.0,
Expand Down Expand Up @@ -364,7 +384,12 @@ public void testQuantileOnNumericValues() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<double[]> expectedResults = ImmutableList.of(
new double[] {
1.0,
Expand Down Expand Up @@ -411,7 +436,12 @@ public void testCompressionParamForTDigestQuantileAgg() throws Exception
+ "FROM foo";

// Log query
sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();

// Verify query
Assert.assertEquals(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,12 @@ public void testApproxCountDistinctHllSketch() throws Exception
+ "FROM druid.foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults;

if (NullHandling.replaceWithDefault()) {
Expand Down Expand Up @@ -335,7 +340,12 @@ public void testAvgDailyCountDistinctHllSketch() throws Exception
+ ")";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
1L
Expand Down Expand Up @@ -431,7 +441,8 @@ public void testApproxCountDistinctHllSketchIsRounded() throws Exception
+ " HAVING APPROX_COUNT_DISTINCT_DS_HLL(m1) = 2";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results =
sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, DEFAULT_PARAMETERS, authenticationResult).toList();
final int expected = NullHandling.replaceWithDefault() ? 1 : 2;
Assert.assertEquals(expected, results.size());
}
Expand All @@ -458,7 +469,12 @@ public void testHllSketchPostAggs() throws Exception
+ "FROM druid.foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
"\"AgEHDAMIAgDhUv8P63iABQ==\"",
Expand Down Expand Up @@ -604,7 +620,12 @@ public void testtHllSketchPostAggsPostSort() throws Exception
final String sql2 = StringUtils.format("SELECT HLL_SKETCH_ESTIMATE(y), HLL_SKETCH_TO_STRING(y) from (%s)", sql);

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql2, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql2,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
2.000000004967054d,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,12 @@ public void testQuantileOnFloatAndLongs() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
1.0,
Expand Down Expand Up @@ -304,7 +309,12 @@ public void testQuantileOnComplexColumn() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = lifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = lifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
1.0,
Expand Down Expand Up @@ -363,7 +373,12 @@ public void testQuantileOnInnerQuery() throws Exception
+ "FROM (SELECT dim2, SUM(m1) AS x FROM foo GROUP BY dim2)";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults;
if (NullHandling.replaceWithDefault()) {
expectedResults = ImmutableList.of(new Object[]{7.0, 11.0});
Expand Down Expand Up @@ -432,7 +447,12 @@ public void testQuantileOnInnerQuantileQuery() throws Exception
+ "FROM (SELECT dim1, dim2, APPROX_QUANTILE_DS(m1, 0.5) AS x FROM foo GROUP BY dim1, dim2) GROUP BY dim1";


final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();

ImmutableList.Builder<Object[]> builder = ImmutableList.builder();
builder.add(new Object[]{"", 1.0});
Expand Down Expand Up @@ -513,7 +533,12 @@ public void testDoublesSketchPostAggs() throws Exception
+ "FROM foo";

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
6L,
Expand Down Expand Up @@ -680,7 +705,12 @@ public void testDoublesSketchPostAggsPostSort() throws Exception
final String sql2 = StringUtils.format("SELECT DS_GET_QUANTILE(y, 0.5), DS_GET_QUANTILE(y, 0.98) from (%s)", sql);

// Verify results
final List<Object[]> results = sqlLifecycle.runSimple(sql2, QUERY_CONTEXT_DEFAULT, authenticationResult).toList();
final List<Object[]> results = sqlLifecycle.runSimple(
sql2,
QUERY_CONTEXT_DEFAULT,
DEFAULT_PARAMETERS,
authenticationResult
).toList();
final List<Object[]> expectedResults = ImmutableList.of(
new Object[]{
4.0d,
Expand Down
Loading