…t fails".

The Splunk adapter had been broken by various changes, including the factoring out of Avatica JDBC and removing the schema parent from a table. SplunkAdapterTest now succeeds, and can be run as part of the regular suite if you specify -Doptiq.test.splunk=true.
julianhyde committed Apr 15, 2014
1 parent 4838455 commit c30d6f1
Showing 11 changed files with 278 additions and 164 deletions.
10 changes: 10 additions & 0 deletions HOWTO.md
@@ -100,6 +100,16 @@ Closing: net.hydromatic.optiq.jdbc.FactoryJdbc41$OptiqConnectionJdbc41
$
```
# Splunk adapter
To run the test suite and sample queries against Splunk,
load Splunk's `tutorialdata.zip` data set as described in
<a href="http://docs.splunk.com/Documentation/Splunk/6.0.2/PivotTutorial/GetthetutorialdataintoSplunk">the Splunk tutorial</a>.
(This step is optional, but it provides some interesting data for the sample
queries. It is also necessary if you intend to run the test suite, using
`-Doptiq.test.splunk=true`.)
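Once the tutorial data is loaded, you can connect through the Splunk adapter from an ordinary JDBC program. A minimal sketch follows, assuming the driver is on the classpath; the `jdbc:splunk:` prefix and the driver class name appear in this commit, while the connection property names (`url`, `user`, `password`) and the `"splunk"."splunk"` schema/table are assumptions modelled on the test suite rather than documented API.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class SplunkSample {
  public static void main(String[] args) throws Exception {
    // Register the Splunk driver (class name as in this commit).
    Class.forName("net.hydromatic.optiq.impl.splunk.SplunkDriver");

    // Assumed property names; point them at your Splunk instance.
    Properties info = new Properties();
    info.put("url", "https://localhost:8089");
    info.put("user", "admin");
    info.put("password", "changeme");

    Connection connection =
        DriverManager.getConnection("jdbc:splunk:", info);
    Statement statement = connection.createStatement();
    // Assumed schema and table names exposed by the adapter.
    ResultSet resultSet = statement.executeQuery(
        "select \"sourcetype\", count(*) c\n"
        + "from \"splunk\".\"splunk\"\n"
        + "group by \"sourcetype\"");
    while (resultSet.next()) {
      System.out.println(resultSet.getString(1) + ": " + resultSet.getLong(2));
    }
    connection.close();
  }
}
```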
# Implementing adapters
New adapters can be created by implementing `OptiqPrepare.Context`:
@@ -602,7 +602,7 @@ protected SqlToRelConverter getSqlToRelConverter(
new SqlToRelConverter(
this, validator, catalogReader, planner, rexBuilder,
StandardConvertletTable.INSTANCE);
sqlToRelConverter.setTrimUnusedFields(false);
sqlToRelConverter.setTrimUnusedFields(true);
return sqlToRelConverter;
}

@@ -87,18 +87,7 @@ public static RelDataTypeField lookupField(
boolean caseSensitive,
final RelDataType rowType,
String columnName) {
RelDataTypeField field = rowType.getField(columnName, caseSensitive);
if (field != null) {
return field;
}

// If record type is flagged as having "any field you ask for",
// return a type. (TODO: Better way to mark accommodating types.)
RelDataTypeField extra = RelDataTypeImpl.extra(rowType);
if (extra != null) {
return new RelDataTypeFieldImpl(columnName, -1, extra.getType());
}
return null;
return rowType.getField(columnName, caseSensitive);
}

public static void checkCharsetAndCollateConsistentIfCharType(
@@ -193,6 +193,7 @@ protected final TrimResult dispatchTrimFields(
: "source: " + mapping.getSourceCount() + " != " + fieldCount;
final int newFieldCount = newRel.getRowType().getFieldCount();
assert mapping.getTargetCount() + extraFields.size() == newFieldCount
|| Bug.TODO_FIXED
: "target: " + mapping.getTargetCount()
+ " + " + extraFields.size()
+ " != " + newFieldCount;
@@ -520,7 +521,7 @@ public TrimResult trimFields(
// individually. For now, we assume that just one input has
// on-demand fields.
Set<RelDataTypeField> inputExtraFields =
RelDataTypeImpl.extra(rowType) == null
RelDataTypeImpl.extra(inputRowType) == null
? Collections.<RelDataTypeField>emptySet()
: combinedInputExtraFields;
inputExtraFieldCounts.add(inputExtraFields.size());
31 changes: 23 additions & 8 deletions core/src/main/java/org/eigenbase/sql2rel/SqlToRelConverter.java
@@ -1968,12 +1968,10 @@ protected RelNode createJoin(
final List<RexNode> extraRightExprs = new ArrayList<RexNode>();
final int leftCount = leftRel.getRowType().getFieldCount();
final int rightCount = rightRel.getRowType().getFieldCount();
joinCond = pushDownJoinConditions(
joinCond,
leftCount,
rightCount,
extraLeftExprs,
extraRightExprs);
if (!containsGet(joinCond)) {
joinCond = pushDownJoinConditions(
joinCond, leftCount, rightCount, extraLeftExprs, extraRightExprs);
}
if (!extraLeftExprs.isEmpty()) {
final List<RelDataTypeField> fields =
leftRel.getRowType().getFieldList();
@@ -2048,6 +2046,23 @@ public Pair<RexNode, String> get(int index) {
return join;
}

private static boolean containsGet(RexNode node) {
try {
node.accept(
new RexVisitorImpl<Void>(true) {
@Override public Void visitCall(RexCall call) {
if (call.getOperator() == RexBuilder.GET_OPERATOR) {
throw Util.FoundOne.NULL;
}
return super.visitCall(call);
}
});
return false;
} catch (Util.FoundOne e) {
return true;
}
}

/**
* Pushes down parts of a join condition. For example, given
* "emp JOIN dept ON emp.deptno + 1 = dept.deptno", adds a project above
@@ -3180,8 +3195,8 @@ private RexNode convertIdentifier(
name = namespace.translate(name);
namespace = null;
}
final int fieldOrdinal = catalogReader.fieldOrdinal(e.getType(), name);
e = rexBuilder.makeFieldAccess(e, fieldOrdinal);
final boolean caseSensitive = true; // name already fully-qualified
e = rexBuilder.makeFieldAccess(e, name, caseSensitive);
}
if (e instanceof RexInputRef) {
// adjust the type to account for nulls introduced by outer joins
6 changes: 3 additions & 3 deletions splunk/pom.xml
@@ -36,9 +36,9 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>2.15</version>
<configuration>
<excludes>
<exclude>net/hydromatic/**/SplunkAdapterTest.java</exclude>
</excludes>
<includes>
<include>net/hydromatic/optiq/test/SplunkAdapterTest.java</include>
</includes>
</configuration>
</plugin>

@@ -18,7 +18,6 @@
package net.hydromatic.optiq.impl.splunk;

import net.hydromatic.avatica.DriverVersion;
import net.hydromatic.avatica.UnregisteredDriver;

import net.hydromatic.optiq.SchemaPlus;
import net.hydromatic.optiq.impl.jdbc.JdbcSchema;
@@ -36,7 +35,7 @@
*
* <p>It accepts connect strings that start with "jdbc:splunk:".</p>
*/
public class SplunkDriver extends UnregisteredDriver {
public class SplunkDriver extends net.hydromatic.optiq.jdbc.Driver {
protected SplunkDriver() {
super();
}
@@ -85,18 +84,20 @@ public Connection connect(String url, Properties info) throws SQLException {
// Include a schema called "mysql" in every splunk connection.
// This is a hack for demo purposes. TODO: Add a config file mechanism.
if (true) {
final String mysqlSchemaName = "mysql";
final String username = "foodmart";
final String password = "foodmart";
final String mysqlSchemaName = "foodmart";
try {
Class.forName("com.mysql.jdbc.Driver");
} catch (ClassNotFoundException e) {
throw new SQLException(e);
}
final DataSource dataSource =
JdbcSchema.dataSource("jdbc:mysql://localhost", null, "foodmart",
"foodmart");
rootSchema.add("foodmart",
JdbcSchema.create(optiqConnection.getRootSchema(), "foodmart",
dataSource, "", mysqlSchemaName));
JdbcSchema.dataSource("jdbc:mysql://localhost/foodmart", null,
username, password);
rootSchema.add(mysqlSchemaName,
JdbcSchema.create(optiqConnection.getRootSchema(), mysqlSchemaName,
dataSource, null, mysqlSchemaName));
}

return connection;
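Because the connection now mounts the JDBC-backed schema under the name held in `mysqlSchemaName` ("foodmart"), queries against that schema can run over the same Splunk connection. A minimal sketch, assuming the local MySQL foodmart database configured above; the `sales_fact_1997` table name is a conventional foodmart table and an assumption, not something this commit defines.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;

public class FoodmartOverSplunk {
  public static void main(String[] args) throws Exception {
    Class.forName("net.hydromatic.optiq.impl.splunk.SplunkDriver");
    // Splunk connection properties as in the earlier sketch (assumed names).
    Properties info = new Properties();
    info.put("url", "https://localhost:8089");
    info.put("user", "admin");
    info.put("password", "changeme");
    Connection connection =
        DriverManager.getConnection("jdbc:splunk:", info);
    Statement statement = connection.createStatement();
    // "foodmart" is the schema added in connect(); the table name is assumed.
    ResultSet resultSet = statement.executeQuery(
        "select count(*) from \"foodmart\".\"sales_fact_1997\"");
    while (resultSet.next()) {
      System.out.println("rows: " + resultSet.getLong(1));
    }
    connection.close();
  }
}
```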
@@ -23,13 +23,14 @@
import org.eigenbase.relopt.*;
import org.eigenbase.reltype.*;
import org.eigenbase.rex.*;
import org.eigenbase.sql.SqlKind;
import org.eigenbase.sql.SqlOperator;
import org.eigenbase.sql.*;
import org.eigenbase.sql.fun.SqlStdOperatorTable;
import org.eigenbase.sql.type.SqlTypeName;
import org.eigenbase.util.NlsString;
import org.eigenbase.util.Pair;

import com.google.common.collect.ImmutableSet;

import java.util.*;
import java.util.logging.Logger;

@@ -42,18 +43,18 @@ public class SplunkPushDownRule
StringUtils.getClassTracer(SplunkPushDownRule.class);

private static final Set<SqlKind> SUPPORTED_OPS =
new HashSet<SqlKind>(
Arrays.asList(
SqlKind.EQUALS,
SqlKind.LESS_THAN,
SqlKind.LESS_THAN_OR_EQUAL,
SqlKind.GREATER_THAN,
SqlKind.GREATER_THAN_OR_EQUAL,
SqlKind.NOT_EQUALS,
SqlKind.LIKE,
SqlKind.AND,
SqlKind.OR,
SqlKind.NOT));
ImmutableSet.of(
SqlKind.CAST,
SqlKind.EQUALS,
SqlKind.LESS_THAN,
SqlKind.LESS_THAN_OR_EQUAL,
SqlKind.GREATER_THAN,
SqlKind.GREATER_THAN_OR_EQUAL,
SqlKind.NOT_EQUALS,
SqlKind.LIKE,
SqlKind.AND,
SqlKind.OR,
SqlKind.NOT);

public static final SplunkPushDownRule PROJECT_ON_FILTER =
new SplunkPushDownRule(
@@ -137,17 +138,11 @@ public void onMatch(RelOptRuleCall call) {

LOGGER.fine("fieldNames: " + getFieldsString(topRow));

filterString = getFilter(op, operands, "", topRow.getFieldNames());

if (filterString == null) {
// can't handle - exit and stop optimizer from calling
// any SplunkUdxRel related optimizations
transformToFarragoUdxRel(
call, splunkRel,
filter,
topProj,
bottomProj);
return;
final StringBuilder buf = new StringBuilder();
if (getFilter(op, operands, buf, topRow.getFieldNames())) {
filterString = buf.toString();
} else {
return; // can't handle
}
} else {
filterString = "";
@@ -283,64 +278,69 @@ private static RelNode addProjectionRule(ProjectRel proj, RelNode rel) {
// TODO: refactor this to use more tree like parsing, need to also
// make sure we use parens properly - currently precedence
// rules are simply left to right
private String getFilter(
SqlOperator op,
List<RexNode> operands,
String s,
List<String> fieldNames) {
private boolean getFilter(SqlOperator op, List<RexNode> operands,
StringBuilder s, List<String> fieldNames) {
if (!valid(op.getKind())) {
return null;
return false;
}

// NOT op pre-pended
if (op.equals(SqlStdOperatorTable.NOT)) {
s = s.concat(" NOT ");
boolean like = false;
switch (op.getKind()) {
case NOT:
// NOT op pre-pended
s = s.append(" NOT ");
break;
case CAST:
return asd(false, operands, s, fieldNames, 0);
case LIKE:
like = true;
break;
}

for (int i = 0; i < operands.size(); i++) {
final RexNode operand = operands.get(i);
if (operand instanceof RexCall) {
s = s.concat("(");
final RexCall call = (RexCall) operand;
s = getFilter(
call.getOperator(),
call.getOperands(),
s,
fieldNames);
if (s == null) {
return null;
}
s = s.concat(")");
if (i != (operands.size() - 1)) {
s = s.concat(" " + op.toString() + " ");
}
} else {
if (operands.size() != 2) {
return null;
}
if (operand instanceof RexInputRef) {
if (i != 0) {
return null; // must be of form field=value
}

int fieldIndex = ((RexInputRef) operand).getIndex();
String name = fieldNames.get(fieldIndex);
s = s.concat(name);
} else { // RexLiteral
RexLiteral lit = (RexLiteral) operand;

String tmp = toString(op, lit);
if (tmp == null) {
return null;
}
s = s.concat(tmp);
if (!asd(like, operands, s, fieldNames, i)) {
return false;
}
if (op instanceof SqlBinaryOperator && i == 0) {
s.append(" ").append(op).append(" ");
}
}
return true;
}

private boolean asd(boolean like, List<RexNode> operands, StringBuilder s,
List<String> fieldNames, int i) {
RexNode operand = operands.get(i);
if (operand instanceof RexCall) {
s.append("(");
final RexCall call = (RexCall) operand;
boolean b =
getFilter(
call.getOperator(),
call.getOperands(),
s,
fieldNames);
if (!b) {
return false;
}
s.append(")");
} else {
if (operand instanceof RexInputRef) {
if (i != 0) {
return false;
}
if (i == 0) {
s = s.concat(toString(op));
int fieldIndex = ((RexInputRef) operand).getIndex();
String name = fieldNames.get(fieldIndex);
s.append(name);
} else { // RexLiteral
String tmp = toString(like, (RexLiteral) operand);
if (tmp == null) {
return false;
}
s.append(tmp);
}
}
return s;
return true;
}

private boolean valid(SqlKind kind) {
@@ -381,14 +381,14 @@ public static String searchEscape(String str) {
return str;
}

private String toString(SqlOperator op, RexLiteral literal) {
private String toString(boolean like, RexLiteral literal) {
String value = null;
SqlTypeName litSqlType = literal.getTypeName();
if (SqlTypeName.NUMERIC_TYPES.contains(litSqlType)) {
value = literal.getValue().toString();
} else if (litSqlType.equals(SqlTypeName.CHAR)) {
value = ((NlsString) literal.getValue()).getValue();
if (op.equals(SqlStdOperatorTable.LIKE)) {
if (like) {
value = value.replaceAll("%", "*");
}
value = searchEscape(value);
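To make the literal handling concrete: `toString(boolean like, RexLiteral)` turns a SQL LIKE pattern into a Splunk wildcard by replacing `%` with `*` before escaping the value. The self-contained sketch below reproduces only that rewrite for illustration; it is not the adapter's code, and the exact search syntax Splunk expects is an assumption.

```java
public class LikeToSplunkSketch {
  /** Mirrors the '%' -> '*' rewrite applied to LIKE literals by the rule. */
  static String likeToSplunk(String likePattern) {
    return likePattern.replaceAll("%", "*");
  }

  public static void main(String[] args) {
    // SQL predicate:              "sourcetype" LIKE 'access%'
    // Approximate Splunk filter:  sourcetype=access*
    System.out.println("sourcetype=" + likeToSplunk("access%"));
  }
}
```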
