diff --git a/.github/workflows/sql-cli-release-workflow.yml b/.github/workflows/sql-cli-release-workflow.yml index 1afc480280..6f523b8c20 100644 --- a/.github/workflows/sql-cli-release-workflow.yml +++ b/.github/workflows/sql-cli-release-workflow.yml @@ -47,6 +47,9 @@ jobs: tarball=`ls ./dist/*.tar.gz` wheel=`ls ./dist/*.whl` + renamed_wheel=`echo $wheel | sed 's/_/-/g'` + mv "$wheel" "$renamed_wheel" + # Inject the build number before the suffix tarball_outfile=`basename ${tarball%.tar.gz}-build-${GITHUB_RUN_NUMBER}.tar.gz` wheel_outfile=`basename ${wheel%.whl}-build-${GITHUB_RUN_NUMBER}.whl` diff --git a/.github/workflows/sql-odbc-release-workflow.yml b/.github/workflows/sql-odbc-release-workflow.yml index f0dba6c00b..0571001526 100644 --- a/.github/workflows/sql-odbc-release-workflow.yml +++ b/.github/workflows/sql-odbc-release-workflow.yml @@ -11,6 +11,8 @@ env: ODBC_BIN_PATH: "./build/odbc/bin" ODBC_BUILD_PATH: "./build/odbc/build" AWS_SDK_INSTALL_PATH: "./build/aws-sdk/install" + PLUGIN_NAME: opendistro-sql-odbc + OD_VERSION: 1.12.0.0 jobs: build-mac: @@ -79,7 +81,10 @@ jobs: if: success() run: | cd installer - pkg=`ls -1t *.pkg | grep "macos" | head -1` + + pkg=`ls -1t *.pkg | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1` + mv "$pkg" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-macos-x64.pkg" + pkg=`ls -1t *.pkg | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-macos-x64.pkg" | head -1` # Inject the build number before the suffix pkg_outfile=`basename ${pkg%.pkg}-build-${GITHUB_RUN_NUMBER}.pkg` @@ -134,7 +139,10 @@ jobs: shell: bash run: | cd ci-output/installer - msi=`ls -1t *.msi | grep "x86" | head -1` + + msi=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1` + mv "$msi" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x86.msi" + msi=`ls -1t *.msi | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x86.msi" | head -1` # Inject the build number before the suffix msi_outfile=`basename ${msi%.msi}-build-${GITHUB_RUN_NUMBER}.msi` @@ -189,8 +197,11 @@ jobs: shell: bash run: | cd ci-output/installer - msi=`ls -1t *.msi | grep "x64" | head -1` - + + msi=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1` + mv "$msi" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x64.msi" + msi=`ls -1t *.msi | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x64.msi" | head -1` + # Inject the build number before the suffix msi_outfile=`basename ${msi%.msi}-build-${GITHUB_RUN_NUMBER}.msi` @@ -198,3 +209,4 @@ jobs: echo "Copying ${msi} to ${s3_prefix}${msi_outfile}" aws s3 cp --quiet $msi ${s3_prefix}${msi_outfile} + diff --git a/README.md b/README.md index a1e0ba02ae..354904d1cd 100644 --- a/README.md +++ b/README.md @@ -28,23 +28,7 @@ Please refer to the [SQL Language Reference Manual](./docs/user/index.rst), [Pip ## Experimental -Recently we have been actively improving our query engine primarily for better correctness and extensibility. The new enhanced query engine has been already supporting the new released Piped Processing Language query processing behind the scene. Meanwhile, the integration with SQL language is also under way. To try out the power of the new query engine with SQL, simply run the command to enable it by [plugin setting](https://github.com/opendistro-for-elasticsearch/sql/blob/develop/docs/user/admin/settings.rst#opendistro-sql-engine-new-enabled). In future release, this will be enabled by default and nothing required to do from your side. 
Please stay tuned for updates on our progress and its new exciting features. - -Here is a documentation list with features only available in this improved SQL query engine. Please follow the instruction above to enable it before trying out example queries in these docs: - -* [Identifiers](./docs/user/general/identifiers.rst): support for identifier names with special characters -* [Data types](./docs/user/general/datatypes.rst): new data types such as date time and interval -* [Expressions](./docs/user/dql/expressions.rst): new expression system that can represent and evaluate complex expressions -* [SQL functions](./docs/user/dql/functions.rst): many more string and date functions added -* [Basic queries](./docs/user/dql/basics.rst) - * Ordering by Aggregate Functions section - * NULLS FIRST/LAST in section Specifying Order for Null -* [Aggregations](./docs/user/dql/aggregations.rst): aggregation over expression and more other features -* [Complex queries](./docs/user/dql/complex.rst) - * Improvement on Subqueries in FROM clause -* [Window functions](./docs/user/dql/window.rst): ranking and aggregate window function support - -To avoid impact on your side, normally you won't see any difference in query response. If you want to check if and why your query falls back to be handled by old SQL engine, please explain your query and check Elasticsearch log for "Request is falling back to old SQL engine due to ...". +Recently we have been actively improving our query engine, primarily for better correctness and extensibility. Behind the scenes, the new enhanced engine already supports the newly released Piped Processing Language, although so far it has been experimental and disabled by default for SQL query processing. With the most important features complete and fully tested, we are now ready to promote it to be our default SQL query engine. Please find more details in [An Introduction to the New SQL Query Engine](/docs/dev/NewSQLEngine.md). ## Setup
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java index 4738d25b74..6d123b45a8 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzer.java
@@ -259,9 +259,9 @@ private Expression visitIdentifier(String ident, AnalysisContext context) { return ref; } + // Array type is not supported yet.
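+ // Struct type is supported now: qualified names such as "object_field.integer_value" are resolved as field paths (see QualifierAnalyzer and ReferenceExpression#resolve).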
private boolean isTypeNotSupported(ExprType type) { - return "struct".equalsIgnoreCase(type.typeName()) - || "array".equalsIgnoreCase(type.typeName()); + return "array".equalsIgnoreCase(type.typeName()); } }
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzer.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzer.java index cbb0541c89..9b732c6c78 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzer.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzer.java
@@ -51,12 +51,25 @@ public String unqualified(QualifiedName fullName) { private boolean isQualifierIndexOrAlias(QualifiedName fullName) { Optional qualifier = fullName.first(); if (qualifier.isPresent()) { + if (isFieldName(qualifier.get())) { + return false; + } resolveQualifierSymbol(fullName, qualifier.get()); return true; } return false; } + private boolean isFieldName(String qualifier) { + try { + // Resolve the qualifier in Namespace.FIELD_NAME + context.peek().resolve(new Symbol(Namespace.FIELD_NAME, qualifier)); + return true; + } catch (SemanticCheckException e2) { + return false; + } + } + private void resolveQualifierSymbol(QualifiedName fullName, String qualifier) { try { context.peek().resolve(new Symbol(Namespace.INDEX_NAME, qualifier));
@@ -64,8 +77,8 @@ private void resolveQualifierSymbol(QualifiedName fullName, String qualifier) { // Throw syntax check intentionally to indicate fall back to old engine. // Need change to semantic check exception in future. throw new SyntaxCheckException(String.format( - "The qualifier [%s] of qualified name [%s] must be an index name or its alias", - qualifier, fullName)); + "The qualifier [%s] of qualified name [%s] must be a field name, index name or its " + + "alias", qualifier, fullName)); } }
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java index dffcd4c0f1..502b2ffc4d 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/dsl/AstDSL.java
@@ -116,7 +116,7 @@ public UnresolvedPlan values(List... values) { return new Values(Arrays.asList(values)); } - public static UnresolvedExpression qualifiedName(String... parts) { + public static QualifiedName qualifiedName(String... parts) { return new QualifiedName(Arrays.asList(parts)); }
@@ -178,7 +178,7 @@ public static Literal nullLiteral() { } public static Map map(String origin, String target) { - return new Map(new Field(origin), new Field(target)); + return new Map(field(origin), field(target)); } public static Map map(UnresolvedExpression origin, UnresolvedExpression target) {
@@ -281,19 +281,19 @@ public AllFields allFields() { } public Field field(UnresolvedExpression field) { - return new Field((QualifiedName) field); - } - - public Field field(String field) { return new Field(field); } public Field field(UnresolvedExpression field, Argument... fieldArgs) { - return new Field(field, Arrays.asList(fieldArgs)); + return field(field, Arrays.asList(fieldArgs)); + } + + public Field field(String field) { + return field(qualifiedName(field)); } public Field field(String field, Argument...
fieldArgs) { - return new Field(field, Arrays.asList(fieldArgs)); + return field(field, Arrays.asList(fieldArgs)); } public Field field(UnresolvedExpression field, List fieldArgs) { @@ -301,7 +301,7 @@ public Field field(UnresolvedExpression field, List fieldArgs) { } public Field field(String field, List fieldArgs) { - return new Field(field, fieldArgs); + return field(qualifiedName(field), fieldArgs); } public Alias alias(String name, UnresolvedExpression expr) { diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java index 6d1cf95b0c..13e6b422bd 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/DataType.java @@ -26,7 +26,7 @@ @RequiredArgsConstructor public enum DataType { TYPE_ERROR(ExprCoreType.UNKNOWN), - NULL(ExprCoreType.UNKNOWN), + NULL(ExprCoreType.UNDEFINED), INTEGER(ExprCoreType.INTEGER), LONG(ExprCoreType.LONG), diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Field.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Field.java index 198b1c6d94..b92b4003cf 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Field.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/ast/expression/Field.java @@ -27,21 +27,24 @@ @Getter @ToString @EqualsAndHashCode(callSuper = false) -@AllArgsConstructor public class Field extends UnresolvedExpression { - private UnresolvedExpression field; - private List fieldArgs = Collections.emptyList(); - public Field(QualifiedName field) { - this.field = field; - } + private final UnresolvedExpression field; - public Field(String field) { - this.field = new QualifiedName(field); + private final List fieldArgs; + + /** + * Constructor of Field. + */ + public Field(UnresolvedExpression field) { + this(field, Collections.emptyList()); } - public Field(String field, List fieldArgs) { - this.field = new QualifiedName(field); + /** + * Constructor of Field. 
+ */ + public Field(UnresolvedExpression field, List fieldArgs) { + this.field = field; this.fieldArgs = fieldArgs; } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDatetimeValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDatetimeValue.java index aad4e0ed7e..2dd49383ac 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDatetimeValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprDatetimeValue.java @@ -26,22 +26,37 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; import java.time.temporal.ChronoUnit; import lombok.RequiredArgsConstructor; @RequiredArgsConstructor public class ExprDatetimeValue extends AbstractExprValue { - private static final DateTimeFormatter formatter = DateTimeFormatter - .ofPattern("yyyy-MM-dd HH:mm:ss[.SSSSSS]"); private final LocalDateTime datetime; + private static final DateTimeFormatter FORMATTER_VARIABLE_MICROS; + private static final int MIN_FRACTION_SECONDS = 0; + private static final int MAX_FRACTION_SECONDS = 6; + + static { + FORMATTER_VARIABLE_MICROS = new DateTimeFormatterBuilder() + .appendPattern("yyyy-MM-dd HH:mm:ss") + .appendFraction( + ChronoField.MICRO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) + .toFormatter(); + } + /** * Constructor with datetime string as input. */ public ExprDatetimeValue(String datetime) { try { - this.datetime = LocalDateTime.parse(datetime, formatter); + this.datetime = LocalDateTime.parse(datetime, FORMATTER_VARIABLE_MICROS); } catch (DateTimeParseException e) { throw new SemanticCheckException(String.format("datetime:%s in unsupported format, please " + "use yyyy-MM-dd HH:mm:ss[.SSSSSS]", datetime)); diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java index 635d8f2fd4..f2050561a7 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValue.java @@ -17,7 +17,6 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; -import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import java.util.Objects; /** @@ -40,7 +39,7 @@ public Object value() { @Override public ExprType type() { - return ExprCoreType.UNKNOWN; + return ExprCoreType.UNDEFINED; } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java index 2638e62537..321587d1de 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValue.java @@ -49,7 +49,7 @@ public Object value() { @Override public ExprType type() { - return ExprCoreType.UNKNOWN; + return ExprCoreType.UNDEFINED; } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java 
b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java index 762d48b883..ed36257d3a 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimeValue.java @@ -21,11 +21,11 @@ import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.exception.SemanticCheckException; import java.time.LocalTime; -import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; import java.util.Objects; -import lombok.EqualsAndHashCode; import lombok.RequiredArgsConstructor; /** @@ -35,15 +35,30 @@ public class ExprTimeValue extends AbstractExprValue { private final LocalTime time; + private static final DateTimeFormatter FORMATTER_VARIABLE_MICROS; + private static final int MIN_FRACTION_SECONDS = 0; + private static final int MAX_FRACTION_SECONDS = 6; + + static { + FORMATTER_VARIABLE_MICROS = new DateTimeFormatterBuilder() + .appendPattern("HH:mm:ss") + .appendFraction( + ChronoField.MICRO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) + .toFormatter(); + } + /** * Constructor. */ public ExprTimeValue(String time) { try { - this.time = LocalTime.parse(time); + this.time = LocalTime.parse(time, FORMATTER_VARIABLE_MICROS); } catch (DateTimeParseException e) { throw new SemanticCheckException(String.format("time:%s in unsupported format, please use " - + "HH:mm:ss", time)); + + "HH:mm:ss[.SSSSSS]", time)); } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java index 4a81543457..0c9c506569 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTimestampValue.java @@ -26,7 +26,9 @@ import java.time.LocalTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; import java.time.temporal.ChronoUnit; import java.util.Objects; import lombok.RequiredArgsConstructor; @@ -43,18 +45,33 @@ public class ExprTimestampValue extends AbstractExprValue { /** * todo. only support timestamp in format yyyy-MM-dd HH:mm:ss. */ - private static final DateTimeFormatter FORMATTER = DateTimeFormatter - .ofPattern("yyyy-MM-dd HH:mm:ss[.SSSSSS]"); private static final DateTimeFormatter FORMATTER_WITNOUT_NANO = DateTimeFormatter .ofPattern("yyyy-MM-dd HH:mm:ss"); private final Instant timestamp; + private static final DateTimeFormatter FORMATTER_VARIABLE_MICROS; + private static final int MIN_FRACTION_SECONDS = 0; + private static final int MAX_FRACTION_SECONDS = 6; + + static { + FORMATTER_VARIABLE_MICROS = new DateTimeFormatterBuilder() + .appendPattern("yyyy-MM-dd HH:mm:ss") + .appendFraction( + ChronoField.MICRO_OF_SECOND, + MIN_FRACTION_SECONDS, + MAX_FRACTION_SECONDS, + true) + .toFormatter(); + } + /** * Constructor. 
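+ * Accepts a timestamp with 0 to 6 fractional-second digits, e.g. "2020-08-17 10:11:12.123456".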
*/ public ExprTimestampValue(String timestamp) { try { - this.timestamp = LocalDateTime.parse(timestamp, FORMATTER).atZone(ZONE).toInstant(); + this.timestamp = LocalDateTime.parse(timestamp, FORMATTER_VARIABLE_MICROS) + .atZone(ZONE) + .toInstant(); } catch (DateTimeParseException e) { throw new SemanticCheckException(String.format("timestamp:%s in unsupported format, please " + "use yyyy-MM-dd HH:mm:ss[.SSSSSS]", timestamp)); @@ -66,7 +83,7 @@ public ExprTimestampValue(String timestamp) { public String value() { return timestamp.getNano() == 0 ? FORMATTER_WITNOUT_NANO.withZone(ZONE) .format(timestamp.truncatedTo(ChronoUnit.SECONDS)) - : FORMATTER.withZone(ZONE).format(timestamp); + : FORMATTER_VARIABLE_MICROS.withZone(ZONE).format(timestamp); } @Override diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java index fb8cd658a5..05ba8412b6 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprTupleValue.java @@ -64,8 +64,7 @@ public String toString() { @Override public BindingTuple bindingTuples() { - return new LazyBindingTuple( - bindingName -> valueMap.getOrDefault(bindingName, ExprMissingValue.of())); + return new LazyBindingTuple(() -> this); } @Override @@ -73,6 +72,11 @@ public Map tupleValue() { return valueMap; } + @Override + public ExprValue keyValue(String key) { + return valueMap.getOrDefault(key, ExprMissingValue.of()); + } + /** * Override the equals method. * @return true for equal, otherwise false. diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java index 7b98c0b4cf..0faa314857 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprValue.java @@ -196,4 +196,12 @@ default List collectionValue() { throw new ExpressionEvaluationException( "invalid to get collectionValue from value of type " + type()); } + + /** + * Get the value specified by key from {@link ExprTupleValue}. + * This method only be implemented in {@link ExprTupleValue}. + */ + default ExprValue keyValue(String key) { + return ExprMissingValue.of(); + } } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java index 48202206a9..7928dbb85f 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprCoreType.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -29,14 +30,21 @@ */ public enum ExprCoreType implements ExprType { /** - * UNKNOWN. + * Unknown due to unsupported data type. */ UNKNOWN, + /** + * Undefined type for special literal such as NULL. + * As the root of data type tree, it is compatible with any other type. + * In other word, undefined type is the "narrowest" type. + */ + UNDEFINED, + /** * Numbers. 
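+ * Each numeric constant below lists the narrower type that is compatible with it (e.g. SHORT(BYTE)); BYTE itself now accepts UNDEFINED, the narrowest type.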
*/ - BYTE, + BYTE(UNDEFINED), SHORT(BYTE), INTEGER(SHORT), LONG(INTEGER), @@ -46,38 +54,38 @@ public enum ExprCoreType implements ExprType { /** * Boolean. */ - BOOLEAN, + BOOLEAN(UNDEFINED), /** * String. */ - STRING, + STRING(UNDEFINED), /** * Date. * Todo. compatible relationship. */ - TIMESTAMP, - DATE, - TIME, - DATETIME, - INTERVAL, + TIMESTAMP(UNDEFINED), + DATE(UNDEFINED), + TIME(UNDEFINED), + DATETIME(UNDEFINED), + INTERVAL(UNDEFINED), /** * Struct. */ - STRUCT, + STRUCT(UNDEFINED), /** * Array. */ - ARRAY; + ARRAY(UNDEFINED); /** - * Parent of current base type. + * Parents (wider/compatible types) of current base type. */ - private ExprCoreType parent; + private final List parents = new ArrayList<>(); /** * The mapping between Type and legacy JDBC type name. @@ -91,13 +99,13 @@ public enum ExprCoreType implements ExprType { ExprCoreType(ExprCoreType... compatibleTypes) { for (ExprCoreType subType : compatibleTypes) { - subType.parent = this; + subType.parents.add(this); } } @Override public List getParent() { - return Arrays.asList(parent == null ? UNKNOWN : parent); + return parents.isEmpty() ? ExprType.super.getParent() : parents; } @Override @@ -113,9 +121,11 @@ public String legacyTypeName() { /** * Return all the valid ExprCoreType. */ - public static List coreTypes() { - return Arrays.stream(ExprCoreType.values()).filter(type -> type != UNKNOWN) - .collect(Collectors.toList()); + public static List coreTypes() { + return Arrays.stream(ExprCoreType.values()) + .filter(type -> type != UNKNOWN) + .filter(type -> type != UNDEFINED) + .collect(Collectors.toList()); } public static List numberTypes() { diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java index 2aba72f407..0d03ddc536 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/DSL.java @@ -507,6 +507,10 @@ private Aggregator aggregate(BuiltinFunctionName functionName, Expression... exp } public FunctionExpression isnull(Expression... expressions) { + return function(BuiltinFunctionName.ISNULL, expressions); + } + + public FunctionExpression is_null(Expression... expressions) { return function(BuiltinFunctionName.IS_NULL, expressions); } @@ -514,6 +518,18 @@ public FunctionExpression isnotnull(Expression... expressions) { return function(BuiltinFunctionName.IS_NOT_NULL, expressions); } + public FunctionExpression ifnull(Expression... expressions) { + return function(BuiltinFunctionName.IFNULL, expressions); + } + + public FunctionExpression nullif(Expression... expressions) { + return function(BuiltinFunctionName.NULLIF, expressions); + } + + public FunctionExpression iffunction(Expression... expressions) { + return function(BuiltinFunctionName.IF, expressions); + } + public static Expression cases(Expression defaultResult, WhenClause... 
whenClauses) { return new CaseClause(Arrays.asList(whenClauses), defaultResult);
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java index ba3a8f4e87..aa0a6e7b94 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java
@@ -15,9 +15,14 @@ package com.amazon.opendistroforelasticsearch.sql.expression; +import static com.amazon.opendistroforelasticsearch.sql.utils.ExpressionUtils.PATH_SEP; + +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.expression.env.Environment; +import java.util.Arrays; +import java.util.List; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor;
@@ -28,8 +33,23 @@ public class ReferenceExpression implements Expression { @Getter private final String attr; + @Getter + private final List paths; + private final ExprType type; + /** + * Constructor of ReferenceExpression. + * @param ref the field name. e.g. addr.state/addr. + * @param type type. + */ + public ReferenceExpression(String ref, ExprType type) { + this.attr = ref; + // Todo. the definition of paths needs to be revisited after adding multiple index/variable support. + this.paths = Arrays.asList(ref.split("\\.")); + this.type = type; + } + @Override public ExprValue valueOf(Environment env) { return env.resolve(this);
@@ -49,4 +69,51 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public String toString() { return attr; } + + /** + * Resolve the ExprValue from {@link ExprTupleValue} using paths. + * Consider the following sample data. + * { + * "name": "bob smith" + * "project.year": 1990, + * "project": { + * "year": "2020" + * } + * "address": { + * "state": "WA", + * "city": "seattle", + * "project.year": 1990 + * } + * "address.local": { + * "state": "WA", + * } + * } + * The paths could be + * 1. top level, e.g. "name", which will be resolved as "bob smith" + * 2. multiple paths, e.g. "address.state", which will be resolved as "WA" + * 3. special case: the "." is the path separator, but a field name may itself include ".". To + * handle this case we define the resolve rule below, e.g. "project.year" is resolved as 1990 + * instead of 2020. Note: this logic only applies to top-level non-object fields, e.g. + * "address.local.state" is resolved to Missing, but "address.project.year" can still be + * resolved as 1990. + * + *
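+ * For example, "address.state" first tries the whole key "address.state" (missing in the sample above), then resolves "address" and looks up "state" inside it, returning "WA".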

Resolve Rule + * 1. Resolve the full name by combining the paths ("x"."y"."z") as a whole ("x.y.z"). + * 2. Resolve the path recursively through ExprValue. + * + * @param value {@link ExprTupleValue}. + * @return the resolved {@link ExprValue}. + */ + public ExprValue resolve(ExprTupleValue value) { + return resolve(value, paths); + } + + private ExprValue resolve(ExprValue value, List paths) { + final ExprValue wholePathValue = value.keyValue(String.join(PATH_SEP, paths)); + if (!wholePathValue.isMissing() || paths.size() == 1) { + return wholePathValue; + } else { + return resolve(value.keyValue(paths.get(0)), paths.subList(1, paths.size())); + } + } }
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClause.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClause.java index f00a6cfd12..2303bdae1d 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClause.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClause.java
@@ -16,7 +16,7 @@ package com.amazon.opendistroforelasticsearch.sql.expression.conditional.cases; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNKNOWN; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNDEFINED; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprNullValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue;
@@ -75,8 +75,8 @@ public ExprValue valueOf(Environment valueEnv) { public ExprType type() { List types = allResultTypes(); - // Return unknown if all WHEN/ELSE return NULL - return types.isEmpty() ? UNKNOWN : types.get(0); + // Return undefined if all WHEN/ELSE return NULL + return types.isEmpty() ?
UNDEFINED : types.get(0); } @Override @@ -98,7 +98,7 @@ public List allResultTypes() { types.add(defaultResult.type()); } - types.removeIf(type -> (type == UNKNOWN)); + types.removeIf(type -> (type == UNDEFINED)); return types; } diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java index dad231fe62..8376be4163 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java @@ -24,6 +24,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.define; import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.impl; +import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.nullMissingHandling; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprIntervalValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; @@ -54,8 +55,8 @@ public void register(BuiltinFunctionRepository repository) { private FunctionResolver interval() { return define(BuiltinFunctionName.INTERVAL.getName(), - impl(IntervalClause::interval, INTERVAL, INTEGER, STRING), - impl(IntervalClause::interval, INTERVAL, LONG, STRING)); + impl(nullMissingHandling(IntervalClause::interval), INTERVAL, INTEGER, STRING), + impl(nullMissingHandling(IntervalClause::interval), INTERVAL, LONG, STRING)); } private ExprValue interval(ExprValue value, ExprValue unit) { diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java index 3c009ea067..6b29c68da1 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/function/BuiltinFunctionName.java @@ -138,6 +138,10 @@ public enum BuiltinFunctionName { */ IS_NULL(FunctionName.of("is null")), IS_NOT_NULL(FunctionName.of("is not null")), + IFNULL(FunctionName.of("ifnull")), + IF(FunctionName.of("if")), + NULLIF(FunctionName.of("nullif")), + ISNULL(FunctionName.of("isnull")), ROW_NUMBER(FunctionName.of("row_number")), RANK(FunctionName.of("rank")), diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java index 18a621acb3..01e9aa99ca 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperator.java @@ -15,16 +15,29 @@ package com.amazon.opendistroforelasticsearch.sql.expression.operator.predicate; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; + import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; +import static 
com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNKNOWN; + +import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.impl; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprBooleanValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionRepository; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionBuilder; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionName; import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionResolver; +import com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionSignature; +import com.amazon.opendistroforelasticsearch.sql.expression.function.SerializableFunction; + import java.util.Arrays; +import java.util.List; import java.util.stream.Collectors; import lombok.experimental.UtilityClass; @@ -39,8 +52,12 @@ public class UnaryPredicateOperator { */ public static void register(BuiltinFunctionRepository repository) { repository.register(not()); - repository.register(isNull()); repository.register(isNotNull()); + repository.register(ifNull()); + repository.register(nullIf()); + repository.register(isNull(BuiltinFunctionName.IS_NULL)); + repository.register(isNull(BuiltinFunctionName.ISNULL)); + repository.register(ifFunction()); } private static FunctionResolver not() { @@ -64,10 +81,9 @@ public ExprValue not(ExprValue v) { } } - private static FunctionResolver isNull() { - + private static FunctionResolver isNull(BuiltinFunctionName funcName) { return FunctionDSL - .define(BuiltinFunctionName.IS_NULL.getName(), Arrays.stream(ExprCoreType.values()) + .define(funcName.getName(), Arrays.stream(ExprCoreType.values()) .map(type -> FunctionDSL .impl((v) -> ExprBooleanValue.of(v.isNull()), BOOLEAN, type)) .collect( @@ -82,4 +98,65 @@ private static FunctionResolver isNotNull() { .collect( Collectors.toList())); } + + private static FunctionResolver ifFunction() { + FunctionName functionName = BuiltinFunctionName.IF.getName(); + List typeList = ExprCoreType.coreTypes(); + + List>> functionsOne = typeList.stream().map(v -> + impl((UnaryPredicateOperator::exprIf), v, BOOLEAN, v, v)) + .collect(Collectors.toList()); + + FunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); + return functionResolver; + } + + private static FunctionResolver ifNull() { + FunctionName functionName = BuiltinFunctionName.IFNULL.getName(); + List typeList = ExprCoreType.coreTypes(); + + List>> functionsOne = typeList.stream().map(v -> + impl((UnaryPredicateOperator::exprIfNull), v, v, v)) + .collect(Collectors.toList()); + + FunctionResolver functionResolver = FunctionDSL.define(functionName, functionsOne); + return functionResolver; + } + + private static FunctionResolver nullIf() { + FunctionName functionName = BuiltinFunctionName.NULLIF.getName(); + List typeList = ExprCoreType.coreTypes(); + + FunctionResolver functionResolver = + FunctionDSL.define(functionName, + typeList.stream().map(v -> + impl((UnaryPredicateOperator::exprNullIf), v, v, v)) + .collect(Collectors.toList())); + return functionResolver; 
+ } + + /** Return v2 if v1 is null or missing. + * @param v1 variable 1 + * @param v2 variable 2 + * @return v2 if v1 is null or missing, otherwise v1 + */ + public static ExprValue exprIfNull(ExprValue v1, ExprValue v2) { + return (v1.isNull() || v1.isMissing()) ? v2 : v1; + } + + /** Return null if v1 equals v2. + * @param v1 variable 1 + * @param v2 variable 2 + * @return null if v1 equals v2, otherwise v1 + */ + public static ExprValue exprNullIf(ExprValue v1, ExprValue v2) { + return v1.equals(v2) ? LITERAL_NULL : v1; + } + + /** Return v2 if v1 evaluates to true, otherwise v3. + * @param v1 condition + * @param v2 value returned when the condition is true + * @param v3 value returned when the condition is false, null or missing + * @return v2 or v3 + */ + public static ExprValue exprIf(ExprValue v1, ExprValue v2, ExprValue v3) { + return !v1.isNull() && !v1.isMissing() && LITERAL_TRUE.equals(v1) ? v2 : v3; + } + }
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/storage/bindingtuple/LazyBindingTuple.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/storage/bindingtuple/LazyBindingTuple.java index f5e29594b9..97a16323f1 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/storage/bindingtuple/LazyBindingTuple.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/storage/bindingtuple/LazyBindingTuple.java
@@ -15,9 +15,10 @@ package com.amazon.opendistroforelasticsearch.sql.storage.bindingtuple; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.expression.ReferenceExpression; -import java.util.function.Function; +import java.util.function.Supplier; import lombok.RequiredArgsConstructor; /**
@@ -25,10 +26,10 @@ */ @RequiredArgsConstructor public class LazyBindingTuple extends BindingTuple { - private final Function lazyBinding; + private final Supplier lazyBinding; @Override public ExprValue resolve(ReferenceExpression ref) { - return lazyBinding.apply(ref.getAttr()); + return ref.resolve(lazyBinding.get()); } }
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/ExpressionUtils.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/ExpressionUtils.java index dcb8b10239..9b92b06d0f 100644 --- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/ExpressionUtils.java +++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/utils/ExpressionUtils.java
@@ -26,6 +26,8 @@ @UtilityClass public class ExpressionUtils { + public static String PATH_SEP = "."; + /** * Format the list of {@link Expression}.
*/
diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java index c3ecc4c8be..7e6fa063ea 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/ExpressionAnalyzerTest.java
@@ -194,13 +194,20 @@ public void qualified_name_with_qualifier() { qualifiedName("index_alias", "integer_value") ); - analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "nested_field"), STRUCT); + analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field"), STRUCT); + analysisContext.peek().define(new Symbol(Namespace.FIELD_NAME, "object_field.integer_value"), + INTEGER); + assertAnalyzeEqual( + DSL.ref("object_field.integer_value", INTEGER), + qualifiedName("object_field", "integer_value") + ); + SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, () -> analyze(qualifiedName("nested_field", "integer_value"))); assertEquals( "The qualifier [nested_field] of qualified name [nested_field.integer_value] " - + "must be an index name or its alias", + + "must be a field name, index name or its alias", exception.getMessage() ); analysisContext.pop();
@@ -237,17 +244,6 @@ public void case_clause() { AstDSL.stringLiteral("test")))); } - @Test - public void skip_struct_data_type() { - SyntaxCheckException exception = - assertThrows(SyntaxCheckException.class, - () -> analyze(qualifiedName("struct_value"))); - assertEquals( - "Identifier [struct_value] of type [STRUCT] is not supported yet", - exception.getMessage() - ); - } - @Test public void skip_array_data_type() { SyntaxCheckException exception =
diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzerTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzerTest.java index 4b7014309f..eb8638816a 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzerTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/analysis/QualifierAnalyzerTest.java
@@ -52,11 +52,11 @@ void should_return_original_name_if_no_qualifier() { @Test void should_report_error_if_qualifier_is_not_index() { - runInScope(new Symbol(Namespace.FIELD_NAME, "a"), ARRAY, () -> { + runInScope(new Symbol(Namespace.FIELD_NAME, "aIndex"), ARRAY, () -> { SyntaxCheckException error = assertThrows(SyntaxCheckException.class, () -> qualifierAnalyzer.unqualified("a", "integer_value")); assertEquals("The qualifier [a] of qualified name [a.integer_value] " - + "must be an index name or its alias", error.getMessage()); + + "must be a field name, index name or its alias", error.getMessage()); }); }
@@ -65,7 +65,8 @@ void should_report_error_if_qualifier_is_not_exist() { SyntaxCheckException error = assertThrows(SyntaxCheckException.class, () -> qualifierAnalyzer.unqualified("a", "integer_value")); assertEquals( - "The qualifier [a] of qualified name [a.integer_value] must be an index name or its alias", + "The qualifier [a] of qualified name [a.integer_value] must be a field name, index name " + + "or its alias", error.getMessage()); }
@@ -76,6 +77,13 @@ void should_return_qualified_name_if_qualifier_is_index() { ); } + @Test + void should_return_qualified_name_if_qualifier_is_field() { + runInScope(new Symbol(Namespace.FIELD_NAME, "a"), STRUCT,
() -> + assertEquals("a.integer_value", qualifierAnalyzer.unqualified("a", "integer_value")) + ); + } + @Test void should_report_error_if_more_parts_in_qualified_name() { runInScope(new Symbol(Namespace.INDEX_NAME, "a"), STRUCT, () -> diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java index 410ea77453..294c21ec52 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/DateTimeValueTest.java @@ -34,6 +34,8 @@ public class DateTimeValueTest { + private static final int MICROS_PRECISION_MAX = 6; + @Test public void timeValueInterfaceTest() { ExprValue timeValue = new ExprTimeValue("01:01:01"); @@ -103,7 +105,7 @@ public void dateInUnsupportedFormat() { public void timeInUnsupportedFormat() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprTimeValue("01:01:0")); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss", + assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSS]", exception.getMessage()); } @@ -172,7 +174,96 @@ public void stringTimeValue() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> new ExprStringValue("01:01:0").timeValue()); - assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss", + assertEquals("time:01:01:0 in unsupported format, please use HH:mm:ss[.SSSSSS]", + exception.getMessage()); + } + + @Test + public void timeWithVariableMicroPrecision() { + String timeWithMicrosFormat = "10:11:12.%s"; + + // Check all lengths of microsecond precision, up to max precision accepted + StringBuilder micros = new StringBuilder(); + for (int microPrecision = 1; microPrecision <= MICROS_PRECISION_MAX; microPrecision++) { + micros.append(microPrecision); + String timeWithMicros = String.format(timeWithMicrosFormat, micros); + + ExprValue timeValue = new ExprTimeValue(timeWithMicros); + assertEquals(LocalTime.parse(timeWithMicros), timeValue.timeValue()); + } + } + + @Test + public void timestampWithVariableMicroPrecision() { + String dateValue = "2020-08-17"; + String timeWithMicrosFormat = "10:11:12.%s"; + + // Check all lengths of microsecond precision, up to max precision accepted + StringBuilder micros = new StringBuilder(); + for (int microPrecision = 1; microPrecision <= MICROS_PRECISION_MAX; microPrecision++) { + micros.append(microPrecision); + String timeWithMicros = String.format(timeWithMicrosFormat, micros); + + String timestampString = String.format("%s %s", dateValue, timeWithMicros); + ExprValue timestampValue = new ExprTimestampValue(timestampString); + + assertEquals(LocalDate.parse(dateValue), timestampValue.dateValue()); + assertEquals(LocalTime.parse(timeWithMicros), timestampValue.timeValue()); + String localDateTime = String.format("%sT%s", dateValue, timeWithMicros); + assertEquals(LocalDateTime.parse(localDateTime), timestampValue.datetimeValue()); + } + } + + @Test + public void datetimeWithVariableMicroPrecision() { + String dateValue = "2020-08-17"; + String timeWithMicrosFormat = "10:11:12.%s"; + + // Check all lengths of microsecond precision, up to max precision accepted + StringBuilder micros = new StringBuilder(); + for (int microPrecision = 1; microPrecision <= MICROS_PRECISION_MAX; microPrecision++) { + micros.append(microPrecision); + 
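+ // e.g. after the third iteration micros is "123", so the datetime under test is "2020-08-17 10:11:12.123"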
String timeWithMicros = String.format(timeWithMicrosFormat, micros); + + String datetimeString = String.format("%s %s", dateValue, timeWithMicros); + ExprValue datetimeValue = new ExprDatetimeValue(datetimeString); + + assertEquals(LocalDate.parse(dateValue), datetimeValue.dateValue()); + assertEquals(LocalTime.parse(timeWithMicros), datetimeValue.timeValue()); + String localDateTime = String.format("%sT%s", dateValue, timeWithMicros); + assertEquals(LocalDateTime.parse(localDateTime), datetimeValue.datetimeValue()); + } + } + + @Test + public void timestampOverMaxMicroPrecision() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, + () -> new ExprTimestampValue("2020-07-07 01:01:01.1234567")); + assertEquals( + "timestamp:2020-07-07 01:01:01.1234567 in unsupported format, " + + "please use yyyy-MM-dd HH:mm:ss[.SSSSSS]", + exception.getMessage()); + } + + @Test + public void datetimeOverMaxMicroPrecision() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, + () -> new ExprDatetimeValue("2020-07-07 01:01:01.1234567")); + assertEquals( + "datetime:2020-07-07 01:01:01.1234567 in unsupported format, " + + "please use yyyy-MM-dd HH:mm:ss[.SSSSSS]", + exception.getMessage()); + } + + @Test + public void timeOverMaxMicroPrecision() { + SemanticCheckException exception = + assertThrows(SemanticCheckException.class, + () -> new ExprTimeValue("01:01:01.1234567")); + assertEquals( + "time:01:01:01.1234567 in unsupported format, please use HH:mm:ss[.SSSSSS]", exception.getMessage()); } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java index 26f2ea3699..583e61b749 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprMissingValueTest.java @@ -43,7 +43,7 @@ public void getValue() { @Test public void getType() { - assertEquals(ExprCoreType.UNKNOWN, LITERAL_MISSING.type()); + assertEquals(ExprCoreType.UNDEFINED, LITERAL_MISSING.type()); } @Test diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java index ca4c85cb6e..fd3103c37b 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNullValueTest.java @@ -42,7 +42,7 @@ public void getValue() { @Test public void getType() { - assertEquals(ExprCoreType.UNKNOWN, LITERAL_NULL.type()); + assertEquals(ExprCoreType.UNDEFINED, LITERAL_NULL.type()); } @Test diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNumberValueTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNumberValueTest.java index fe33301818..d1e20d8a17 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNumberValueTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/model/ExprNumberValueTest.java @@ -18,6 +18,7 @@ package com.amazon.opendistroforelasticsearch.sql.data.model; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import 
com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import org.junit.jupiter.api.Assertions; @@ -31,4 +32,9 @@ public void getShortValueFromIncompatibleExprValue() { .assertThrows(ExpressionEvaluationException.class, () -> booleanValue.shortValue()); assertEquals("invalid to get shortValue from value of type BOOLEAN", exception.getMessage()); } + + @Test + public void key_value() { + assertTrue(new ExprIntegerValue(1).keyValue("path").isMissing()); + } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprTypeTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprTypeTest.java index 0e1b4f859e..215dd370eb 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprTypeTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/data/type/ExprTypeTest.java @@ -25,6 +25,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.SHORT; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNDEFINED; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNKNOWN; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -51,6 +52,12 @@ public void isCompatible() { assertFalse(INTEGER.isCompatible(UNKNOWN)); } + @Test + public void isCompatibleWithUndefined() { + ExprCoreType.coreTypes().forEach(type -> assertTrue(type.isCompatible(UNDEFINED))); + ExprCoreType.coreTypes().forEach(type -> assertFalse(UNDEFINED.isCompatible(type))); + } + @Test public void getParent() { assertThat(((ExprType) () -> "test").getParent(), Matchers.contains(UNKNOWN)); diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java index 1caeba23cc..9a18e00297 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ExpressionTestBase.java @@ -63,7 +63,7 @@ public class ExpressionTestBase { protected Environment typeEnv; @Bean - protected Environment valueEnv() { + protected static Environment valueEnv() { return var -> { if (var instanceof ReferenceExpression) { switch (((ReferenceExpression) var).getAttr()) { diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java index a5d2b934f1..6faafcb0c7 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java @@ -35,11 +35,16 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRUCT; +import static com.amazon.opendistroforelasticsearch.sql.expression.DSL.ref; import static org.junit.jupiter.api.Assertions.assertEquals; -import static 
org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprIntegerValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprStringValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprTupleValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; -import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import org.junit.jupiter.api.DisplayNameGeneration; @@ -77,4 +82,91 @@ public void resolve_type() { assertEquals(ExprCoreType.STRUCT, DSL.ref("struct_value", STRUCT).type()); assertEquals(ExprCoreType.ARRAY, DSL.ref("array_value", ARRAY).type()); } + + @Test + public void path_as_whole_has_highest_priority() { + ReferenceExpression expr = new ReferenceExpression("project.year", INTEGER); + ExprValue actualValue = expr.resolve(tuple()); + + assertEquals(INTEGER, actualValue.type()); + assertEquals(1990, actualValue.integerValue()); + } + + @Test + public void one_path_value() { + ReferenceExpression expr = ref("name", STRING); + ExprValue actualValue = expr.resolve(tuple()); + + assertEquals(STRING, actualValue.type()); + assertEquals("bob smith", actualValue.stringValue()); + } + + @Test + public void multiple_path_value() { + ReferenceExpression expr = new ReferenceExpression("address.state", STRING); + ExprValue actualValue = expr.resolve(tuple()); + + assertEquals(STRING, actualValue.type()); + assertEquals("WA", actualValue.stringValue()); + } + + @Test + public void not_exist_path() { + ReferenceExpression expr = new ReferenceExpression("missing_field", STRING); + ExprValue actualValue = expr.resolve(tuple()); + + assertTrue(actualValue.isMissing()); + } + + @Test + public void object_field_contain_dot() { + ReferenceExpression expr = new ReferenceExpression("address.local.state", STRING); + ExprValue actualValue = expr.resolve(tuple()); + + assertTrue(actualValue.isMissing()); + } + + @Test + public void innner_none_object_field_contain_dot() { + ReferenceExpression expr = new ReferenceExpression("address.project.year", INTEGER); + ExprValue actualValue = expr.resolve(tuple()); + + assertEquals(INTEGER, actualValue.type()); + assertEquals(1990, actualValue.integerValue()); + } + + /** + * { + * "name": "bob smith" + * "project.year": 1990, + * "project": { + * "year": 2020 + * }, + * "address": { + * "state": "WA", + * "city": "seattle" + * "project.year": 1990 + * }, + * "address.local": { + * "state": "WA", + * } + * } + */ + private ExprTupleValue tuple() { + ExprValue address = + ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA", "city", "seattle", "project" + + ".year", 1990)); + ExprValue project = + ExprValueUtils.tupleValue(ImmutableMap.of("year", 2020)); + ExprValue addressLocal = + ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA")); + ExprTupleValue tuple = ExprTupleValue.fromExprValueMap(ImmutableMap.of( + "name", new ExprStringValue("bob smith"), + "project.year", new ExprIntegerValue(1990), + "project", project, + "address", address, + "address.local", addressLocal + )); + return tuple; + } } \ No newline at end of file diff --git 
a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClauseTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClauseTest.java index de91e9c32c..18ebf91719 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClauseTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/conditional/cases/CaseClauseTest.java @@ -77,7 +77,7 @@ void should_use_type_of_when_clause() { @Test void should_use_type_of_nonnull_when_or_else_clause() { - when(whenClause.type()).thenReturn(ExprCoreType.UNKNOWN); + when(whenClause.type()).thenReturn(ExprCoreType.UNDEFINED); Expression defaultResult = mock(Expression.class); when(defaultResult.type()).thenReturn(ExprCoreType.STRING); @@ -87,12 +87,12 @@ void should_use_type_of_nonnull_when_or_else_clause() { @Test void should_use_unknown_type_of_if_all_when_and_else_return_null() { - when(whenClause.type()).thenReturn(ExprCoreType.UNKNOWN); + when(whenClause.type()).thenReturn(ExprCoreType.UNDEFINED); Expression defaultResult = mock(Expression.class); - when(defaultResult.type()).thenReturn(ExprCoreType.UNKNOWN); + when(defaultResult.type()).thenReturn(ExprCoreType.UNDEFINED); CaseClause caseClause = new CaseClause(ImmutableList.of(whenClause), defaultResult); - assertEquals(ExprCoreType.UNKNOWN, caseClause.type()); + assertEquals(ExprCoreType.UNDEFINED, caseClause.type()); } @Test @@ -109,9 +109,9 @@ void should_return_all_result_types_including_default() { @Test void should_return_all_result_types_excluding_null_result() { - when(whenClause.type()).thenReturn(ExprCoreType.UNKNOWN); + when(whenClause.type()).thenReturn(ExprCoreType.UNDEFINED); Expression defaultResult = mock(Expression.class); - when(defaultResult.type()).thenReturn(ExprCoreType.UNKNOWN); + when(defaultResult.type()).thenReturn(ExprCoreType.UNDEFINED); CaseClause caseClause = new CaseClause(ImmutableList.of(whenClause), defaultResult); assertEquals( diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java index 1bf450a208..d04a2c158f 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/DateTimeFunctionTest.java @@ -553,7 +553,7 @@ public void microsecond() { expression = dsl.microsecond(DSL.literal(new ExprTimestampValue("2020-08-17 01:02:03.000010"))); assertEquals(INTEGER, expression.type()); assertEquals(integerValue(10), expression.valueOf(env)); - assertEquals("microsecond(TIMESTAMP '2020-08-17 01:02:03.000010')", expression.toString()); + assertEquals("microsecond(TIMESTAMP '2020-08-17 01:02:03.00001')", expression.toString()); } @Test diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java index ff7a7ea010..043f793b01 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java @@ -16,9 +16,13 @@ package com.amazon.opendistroforelasticsearch.sql.expression.datetime; 
import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.intervalValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.missingValue; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.nullValue; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTERVAL; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.when; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException; @@ -39,6 +43,12 @@ public class IntervalClauseTest extends ExpressionTestBase { @Mock Environment env; + @Mock + Expression nullRef; + + @Mock + Expression missingRef; + @Test public void microsecond() { FunctionExpression expr = dsl.interval(DSL.literal(1), DSL.literal("microsecond")); @@ -114,4 +124,22 @@ public void to_string() { FunctionExpression expr = dsl.interval(DSL.literal(1), DSL.literal("day")); assertEquals("interval(1, \"day\")", expr.toString()); } + + @Test + public void null_value() { + when(nullRef.type()).thenReturn(INTEGER); + when(nullRef.valueOf(env)).thenReturn(nullValue()); + FunctionExpression expr = dsl.interval(nullRef, DSL.literal("day")); + assertEquals(INTERVAL, expr.type()); + assertEquals(nullValue(), expr.valueOf(env)); + } + + @Test + public void missing_value() { + when(missingRef.type()).thenReturn(INTEGER); + when(missingRef.valueOf(env)).thenReturn(missingValue()); + FunctionExpression expr = dsl.interval(missingRef, DSL.literal("day")); + assertEquals(INTERVAL, expr.type()); + assertEquals(missingValue(), expr.valueOf(env)); + } } diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/WideningTypeRuleTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/WideningTypeRuleTest.java index 0e614a4df5..76e07b8d36 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/WideningTypeRuleTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/function/WideningTypeRuleTest.java @@ -21,7 +21,7 @@ import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.LONG; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.SHORT; -import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNKNOWN; +import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.UNDEFINED; import static com.amazon.opendistroforelasticsearch.sql.data.type.WideningTypeRule.IMPOSSIBLE_WIDENING; import static com.amazon.opendistroforelasticsearch.sql.data.type.WideningTypeRule.TYPE_EQUAL; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -33,10 +33,9 @@ import com.google.common.collect.ImmutableTable; import com.google.common.collect.Lists; import com.google.common.collect.Table; -import java.util.Arrays; import java.util.List; -import java.util.stream.Collectors; import java.util.stream.Stream; +import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -60,12 
+59,16 @@ class WideningTypeRuleTest { .put(LONG, FLOAT, 1) .put(LONG, DOUBLE, 2) .put(FLOAT, DOUBLE, 1) + .put(UNDEFINED, BYTE, 1) + .put(UNDEFINED, SHORT, 2) + .put(UNDEFINED, INTEGER, 3) + .put(UNDEFINED, LONG, 4) + .put(UNDEFINED, FLOAT, 5) + .put(UNDEFINED, DOUBLE, 6) .build(); private static Stream distanceArguments() { - List exprTypes = - Arrays.asList(ExprCoreType.values()).stream().filter(type -> type != UNKNOWN).collect( - Collectors.toList()); + List exprTypes = ExprCoreType.coreTypes(); return Lists.cartesianProduct(exprTypes, exprTypes).stream() .map(list -> { ExprCoreType type1 = list.get(0); @@ -81,9 +84,7 @@ private static Stream distanceArguments() { } private static Stream validMaxTypes() { - List exprTypes = - Arrays.asList(ExprCoreType.values()).stream().filter(type -> type != UNKNOWN).collect( - Collectors.toList()); + List exprTypes = ExprCoreType.coreTypes(); return Lists.cartesianProduct(exprTypes, exprTypes).stream() .map(list -> { ExprCoreType type1 = list.get(0); @@ -117,4 +118,13 @@ public void max(ExprCoreType v1, ExprCoreType v2, ExprCoreType expected) { assertEquals(expected, WideningTypeRule.max(v1, v2)); } } + + @Test + public void maxOfUndefinedAndOthersShouldBeTheOtherType() { + ExprCoreType.coreTypes().forEach(type -> + assertEquals(type, WideningTypeRule.max(type, UNDEFINED))); + ExprCoreType.coreTypes().forEach(type -> + assertEquals(type, WideningTypeRule.max(UNDEFINED, type))); + } + } \ No newline at end of file diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java index bfdc097080..70cde0d886 100644 --- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java +++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/operator/predicate/UnaryPredicateOperatorTest.java @@ -23,15 +23,29 @@ import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.booleanValue; import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.BOOLEAN; +import static java.lang.Enum.valueOf; import static org.junit.jupiter.api.Assertions.assertEquals; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprNullValue; +import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue; import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils; +import com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType; import com.amazon.opendistroforelasticsearch.sql.expression.DSL; +import com.amazon.opendistroforelasticsearch.sql.expression.Expression; import com.amazon.opendistroforelasticsearch.sql.expression.ExpressionTestBase; import com.amazon.opendistroforelasticsearch.sql.expression.FunctionExpression; +import com.google.common.collect.Lists; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; class UnaryPredicateOperatorTest extends 
ExpressionTestBase { @@ -44,6 +58,118 @@ public void test_not(Boolean v) { assertEquals(String.format("not(%s)", v.toString()), not.toString()); } + private static Stream isNullArguments() { + ArrayList expressions = new ArrayList<>(); + expressions.add(DSL.literal("test")); + expressions.add(DSL.literal(100)); + expressions.add(DSL.literal("")); + expressions.add(DSL.literal(LITERAL_NULL)); + + return Lists.cartesianProduct(expressions, expressions).stream() + .map(list -> { + Expression e1 = list.get(0); + if (e1.valueOf(valueEnv()).isNull() + || e1.valueOf(valueEnv()).isMissing()) { + return Arguments.of(e1, DSL.literal(LITERAL_TRUE)); + } else { + return Arguments.of(e1, DSL.literal(LITERAL_FALSE)); + } + }); + } + + private static Stream ifNullArguments() { + ArrayList exprValueArrayList = new ArrayList<>(); + exprValueArrayList.add(DSL.literal(123)); + exprValueArrayList.add(DSL.literal(LITERAL_NULL)); + exprValueArrayList.add(DSL.literal(321)); + exprValueArrayList.add(DSL.literal(LITERAL_NULL)); + + return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() + .map(list -> { + Expression e1 = list.get(0); + Expression e2 = list.get(1); + if (e1.valueOf(valueEnv()).value() == LITERAL_NULL.value() + || e1.valueOf(valueEnv()).value() == LITERAL_MISSING) { + return Arguments.of(e1, e2, e2); + } else { + return Arguments.of(e1, e2, e1); + } + }); + } + + private static Stream nullIfArguments() { + ArrayList exprValueArrayList = new ArrayList<>(); + exprValueArrayList.add(DSL.literal(123)); + exprValueArrayList.add(DSL.literal(321)); + + return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() + .map(list -> { + Expression e1 = list.get(0); + Expression e2 = list.get(1); + + if (e1.equals(e2)) { + return Arguments.of(e1, e2, DSL.literal(LITERAL_NULL)); + } else { + return Arguments.of(e1, e2, e1); + } + }); + } + + private static Stream ifArguments() { + ArrayList exprValueArrayList = new ArrayList<>(); + exprValueArrayList.add(DSL.literal(LITERAL_TRUE)); + exprValueArrayList.add(DSL.literal(LITERAL_FALSE)); + exprValueArrayList.add(DSL.literal(LITERAL_NULL)); + exprValueArrayList.add(DSL.literal(LITERAL_MISSING)); + + return Lists.cartesianProduct(exprValueArrayList, exprValueArrayList).stream() + .map(list -> { + Expression e1 = list.get(0); + if (e1.valueOf(valueEnv()).value() == LITERAL_TRUE.value()) { + return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("123")); + } else { + return Arguments.of(e1, DSL.literal("123"), DSL.literal("321"), DSL.literal("321")); + } + }); + } + + private static Stream exprIfNullArguments() { + ArrayList exprValues = new ArrayList<>(); + exprValues.add(LITERAL_NULL); + exprValues.add(LITERAL_MISSING); + exprValues.add(ExprValueUtils.integerValue(123)); + exprValues.add(ExprValueUtils.integerValue(456)); + + return Lists.cartesianProduct(exprValues, exprValues).stream() + .map(list -> { + ExprValue e1 = list.get(0); + ExprValue e2 = list.get(1); + if (e1.isNull() || e1.isMissing()) { + return Arguments.of(e1, e2, e2); + } else { + return Arguments.of(e1, e2, e1); + } + }); + } + + private static Stream exprNullIfArguments() { + ArrayList exprValues = new ArrayList<>(); + exprValues.add(LITERAL_NULL); + exprValues.add(LITERAL_MISSING); + exprValues.add(ExprValueUtils.integerValue(123)); + + return Lists.cartesianProduct(exprValues, exprValues).stream() + .map(list -> { + ExprValue e1 = list.get(0); + ExprValue e2 = list.get(1); + if (e1.equals(e2)) { + return Arguments.of(e1, e2, 
LITERAL_NULL); + } else { + return Arguments.of(e1, e2, e1); + } + }); + } + @Test public void test_not_null() { FunctionExpression expression = dsl.not(DSL.ref(BOOL_TYPE_NULL_VALUE_FIELD, BOOLEAN)); @@ -59,18 +185,18 @@ public void test_not_missing() { } @Test - public void is_null_predicate() { - FunctionExpression expression = dsl.isnull(DSL.literal(1)); + public void test_is_null_predicate() { + FunctionExpression expression = dsl.is_null(DSL.literal(1)); assertEquals(BOOLEAN, expression.type()); assertEquals(LITERAL_FALSE, expression.valueOf(valueEnv())); - expression = dsl.isnull(DSL.literal(ExprNullValue.of())); + expression = dsl.is_null(DSL.literal(ExprNullValue.of())); assertEquals(BOOLEAN, expression.type()); assertEquals(LITERAL_TRUE, expression.valueOf(valueEnv())); } @Test - public void is_not_null_predicate() { + public void test_is_not_null_predicate() { FunctionExpression expression = dsl.isnotnull(DSL.literal(1)); assertEquals(BOOLEAN, expression.type()); assertEquals(LITERAL_TRUE, expression.valueOf(valueEnv())); @@ -79,4 +205,41 @@ public void is_not_null_predicate() { assertEquals(BOOLEAN, expression.type()); assertEquals(LITERAL_FALSE, expression.valueOf(valueEnv())); } + + @ParameterizedTest + @MethodSource("isNullArguments") + public void test_isnull_predicate(Expression v1, Expression expected) { + assertEquals(expected.valueOf(valueEnv()), dsl.isnull(v1).valueOf(valueEnv())); + } + + @ParameterizedTest + @MethodSource("ifNullArguments") + public void test_ifnull_predicate(Expression v1, Expression v2, Expression expected) { + assertEquals(expected.valueOf(valueEnv()), dsl.ifnull(v1, v2).valueOf(valueEnv())); + } + + @ParameterizedTest + @MethodSource("nullIfArguments") + public void test_nullif_predicate(Expression v1, Expression v2, Expression expected) { + assertEquals(expected.valueOf(valueEnv()), dsl.nullif(v1, v2).valueOf(valueEnv())); + } + + @ParameterizedTest + @MethodSource("ifArguments") + public void test_if_predicate(Expression v1, Expression v2, Expression v3, Expression expected) { + assertEquals(expected.valueOf(valueEnv()), dsl.iffunction(v1, v2, v3).valueOf(valueEnv())); + } + + @ParameterizedTest + @MethodSource("exprIfNullArguments") + public void test_exprIfNull_predicate(ExprValue v1, ExprValue v2, ExprValue expected) { + assertEquals(expected.value(), UnaryPredicateOperator.exprIfNull(v1, v2).value()); + } + + @ParameterizedTest + @MethodSource("exprNullIfArguments") + public void test_exprNullIf_predicate(ExprValue v1, ExprValue v2, ExprValue expected) { + assertEquals(expected.value(), UnaryPredicateOperator.exprNullIf(v1, v2).value()); + } + } \ No newline at end of file diff --git a/docs/category.json b/docs/category.json index d570fdd449..fe695f89df 100644 --- a/docs/category.json +++ b/docs/category.json @@ -15,6 +15,7 @@ "experiment/ppl/cmd/stats.rst", "experiment/ppl/cmd/where.rst", "experiment/ppl/general/identifiers.rst", + "experiment/ppl/general/datatypes.rst", "experiment/ppl/functions/math.rst", "experiment/ppl/functions/datetime.rst", "experiment/ppl/functions/string.rst", diff --git a/docs/dev/NewSQLEngine.md b/docs/dev/NewSQLEngine.md new file mode 100644 index 0000000000..a9034ef39e --- /dev/null +++ b/docs/dev/NewSQLEngine.md @@ -0,0 +1,73 @@ +# An Introduction to the New SQL Query Engine + +--- +## 1.Motivations + +The current SQL query engine provides users the basic query capability for using familiar SQL rather than complex Elasticsearch DSL. 
Based on NLPchina ES-SQL, many new features have since been added, such as a semantic analyzer, semi-structured data query support, Hash Join, etc. However, as we looked into more advanced SQL features, challenges started emerging, especially in terms of correctness and extensibility (see [Attributions](../attributions.md)). After thoughtful consideration, we decided to develop a new query engine to address all the problems encountered so far. + + +--- +## 2.What's New + +With the architecture and extensibility significantly improved, the following SQL features can now be introduced in the new query engine: + +* [Identifiers](/docs/user/general/identifiers.rst): Support for identifier names with special characters +* [Data types](/docs/user/general/datatypes.rst): New data types such as date time and interval +* [Expressions](/docs/user/dql/expressions.rst): New expression system that can represent and evaluate complex expressions +* [SQL functions](/docs/user/dql/functions.rst): Many more string and date functions added +* [Basic queries](/docs/user/dql/basics.rst) + * Ordering by Aggregate Functions section + * NULLS FIRST/LAST in section Specifying Order for Null +* [Aggregations](/docs/user/dql/aggregations.rst): + * Aggregation over expression + * Selective aggregation by FILTER function +* [Complex queries](/docs/user/dql/complex.rst) + * Improvement on Subqueries in FROM clause +* [Window functions](/docs/user/dql/window.rst) + * Ranking window functions + * Aggregate window functions + +As for correctness, besides full unit and integration test coverage, we developed a new comparison test framework that ensures correctness by comparing query results with other databases. Please find more details in [Testing](./Testing.md). + + +--- +## 3.What's Changed + +### 3.1 Breaking Changes + +Because the implementation changed internally, you can expect Explain output in a different format. For the query protocol, there are slight changes to two fields' values in the default response format: + +* **Schema**: Previously the `name` and `alias` values differed across queries. For consistency, `name` is now always the original text and `alias` is the alias defined in the SELECT clause, or absent if none. +* **Total**: The `total` field used to represent how many documents matched in total, regardless of how many were returned (indicated by the `size` field). However, this field becomes meaningless because of post-processing on the DSL response in the new query engine. Thus, for now the total number is always the same as the `size` field. + +### 3.2 Limitations + +You can find all the limitations in [Limitations](/docs/user/limitations/limitations.rst). For these unsupported features, the query will be forwarded to the old query engine by a fallback mechanism. To avoid impact on your side, normally you won't see any difference in a query response. If you want to check whether and why your query falls back to the old SQL engine, explain your query and check the Elasticsearch log for "Request is falling back to old SQL engine due to ...". + +Here is a list of common features that are not yet supported in the new query engine: + +* **Cursor**: request with `fetch_size` parameter +* **JSON response format**: will not be supported anymore in the new engine +* **Nested field query**: including support for object field or nested field queries +* **JOINs**: including all types of join queries +* **Elasticsearch functions**: fulltext search, metric and bucket functions + +### 3.3 What if Something Goes Wrong + +Don't panic! 
You can easily roll back to the old query engine with a plugin setting change. Simply run the command to disable it via the [plugin setting](/docs/user/admin/settings.rst#opendistro-sql-engine-new-enabled). As with other cluster setting changes, there is no need to restart Elasticsearch, and the change takes effect on the next incoming query. Afterwards, please report the issue to us. + + +--- +## 4.How it's Implemented + +If you're interested in the new query engine, please find more details in [Develop Guide](../developing.rst), [Architecture](./Architecture.md) and other docs in the dev folder. + + +--- +## 5.What's Next + +As mentioned in section 3.2 Limitations, there are still very popular SQL features not yet supported in the new query engine. In particular, the following items are on our roadmap with high priority: + +1. Object/Nested field queries +2. JOIN support +3. Elasticsearch functions diff --git a/docs/experiment/ppl/functions/condition.rst b/docs/experiment/ppl/functions/condition.rst index c367287df6..3ec153065e 100644 --- a/docs/experiment/ppl/functions/condition.rst +++ b/docs/experiment/ppl/functions/condition.rst @@ -22,13 +22,16 @@ Return type: BOOLEAN Example:: - od> source=accounts | where isnull(employer) | fields account_number, employer - fetched rows / total rows = 1/1 - +------------------+------------+ - | account_number   | employer   | - |------------------+------------| - | 18               | null       | - +------------------+------------+ + od> source=accounts | eval result = isnull(employer) | fields result, employer, firstname + fetched rows / total rows = 4/4 + +----------+------------+-------------+ + | result   | employer   | firstname   | + |----------+------------+-------------| + | False    | Pyrami     | Amber       | + | False    | Netagy     | Hattie      | + | False    | Quility    | Nanette     | + | True     | null       | Dale        | + +----------+------------+-------------+ ISNOTNULL --------- @@ -67,3 +70,114 @@ Example, the account 13 doesn't have email field:: | 13               | null    | +------------------+---------+ +IFNULL +------ + +Description +>>>>>>>>>>> + +Usage: ifnull(field1, field2) returns field2 if field1 is null, otherwise returns field1. + +Argument type: all the supported data types (NOTE: if the two parameters have different types, the query will fail the semantic check). + +Return type: any + +Example:: + + od> source=accounts | eval result = ifnull(employer, 'default') | fields result, employer, firstname + fetched rows / total rows = 4/4 + +----------+------------+-------------+ + | result   | employer   | firstname   | + |----------+------------+-------------| + | Pyrami   | Pyrami     | Amber       | + | Netagy   | Netagy     | Hattie      | + | Quility  | Quility    | Nanette     | + | default  | null       | Dale        | + +----------+------------+-------------+ + +NULLIF +------ + +Description +>>>>>>>>>>> + +Usage: nullif(field1, field2) returns null if the two parameters are the same, otherwise returns field1. 
+ +Argument type: all the supported data types (NOTE: if the two parameters have different types, the query will fail the semantic check) + +Return type: any + +Example:: + + od> source=accounts | eval result = nullif(employer, 'Pyrami') | fields result, employer, firstname + fetched rows / total rows = 4/4 + +----------+------------+-------------+ + | result   | employer   | firstname   | + |----------+------------+-------------| + | null     | Pyrami     | Amber       | + | Netagy   | Netagy     | Hattie      | + | Quility  | Quility    | Nanette     | + | null     | null       | Dale        | + +----------+------------+-------------+ + + +ISNULL +------ + +Description +>>>>>>>>>>> + +Usage: isnull(field) returns true if the field is null, otherwise returns false. + +Argument type: all the supported data types + +Return type: boolean + +Example:: + + od> source=accounts | eval result = isnull(employer) | fields result, employer, firstname + fetched rows / total rows = 4/4 + +----------+------------+-------------+ + | result   | employer   | firstname   | + |----------+------------+-------------| + | False    | Pyrami     | Amber       | + | False    | Netagy     | Hattie      | + | False    | Quility    | Nanette     | + | True     | null       | Dale        | + +----------+------------+-------------+ + +IF +------ + +Description +>>>>>>>>>>> + +Usage: if(condition, expr1, expr2) returns expr1 if the condition is true, otherwise returns expr2. + +Argument type: all the supported data types (NOTE: if expr1 and expr2 have different types, the query will fail the semantic check) + +Return type: any + +Example:: + + od> source=accounts | eval result = if(true, firstname, lastname) | fields result, firstname, lastname + fetched rows / total rows = 4/4 + +----------+-------------+------------+ + | result   | firstname   | lastname   | + |----------+-------------+------------| + | Amber    | Amber       | Duke       | + | Hattie   | Hattie      | Bond       | + | Nanette  | Nanette     | Bates      | + | Dale     | Dale        | Adams      | + +----------+-------------+------------+ + + od> source=accounts | eval result = if(false, firstname, lastname) | fields result, firstname, lastname + fetched rows / total rows = 4/4 + +----------+-------------+------------+ + | result   | firstname   | lastname   | + |----------+-------------+------------| + | Duke     | Amber       | Duke       | + | Bond     | Hattie      | Bond       | + | Bates    | Nanette     | Bates      | + | Adams    | Dale        | Adams      | + +----------+-------------+------------+ diff --git a/docs/experiment/ppl/general/datatypes.rst b/docs/experiment/ppl/general/datatypes.rst index f5e0841bf4..c9ee529287 100644 --- a/docs/experiment/ppl/general/datatypes.rst +++ b/docs/experiment/ppl/general/datatypes.rst @@ -224,3 +224,185 @@ String Data Types A string is a sequence of characters enclosed in either single or double quotes. For example, both 'text' and "text" will be treated as string literal. + +Query Struct Data Types +======================= + +In PPL, the Struct Data Type corresponds to the `Object field type in Elasticsearch `_. The "." is used as the path selector when accessing an inner attribute of the struct data. 
+ +Example: People +--------------- + +There are three fields in test index ``people``: 1) deep nested object field ``city``; 2) object field of array value ``account``; 3) nested field ``projects``:: + + { + "mappings": { + "properties": { + "city": { + "properties": { + "name": { + "type": "keyword" + }, + "location": { + "properties": { + "latitude": { + "type": "double" + } + } + } + } + }, + "account": { + "properties": { + "id": { + "type": "keyword" + } + } + }, + "projects": { + "type": "nested", + "properties": { + "name": { + "type": "keyword" + } + } + } + } + } + } + +Example: Employees +------------------ + +Here is the mapping for test index ``employees_nested``. Note that field ``projects`` is a nested field:: + + { + "mappings": { + "properties": { + "id": { + "type": "long" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "projects": { + "type": "nested", + "properties": { + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + }, + "fielddata": true + }, + "started_year": { + "type": "long" + } + } + }, + "title": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + + +Result set:: + + { + "employees_nested" : [ + { + "id" : 3, + "name" : "Bob Smith", + "title" : null, + "projects" : [ + { + "name" : "AWS Redshift Spectrum querying", + "started_year" : 1990 + }, + { + "name" : "AWS Redshift security", + "started_year" : 1999 + }, + { + "name" : "AWS Aurora security", + "started_year" : 2015 + } + ] + }, + { + "id" : 4, + "name" : "Susan Smith", + "title" : "Dev Mgr", + "projects" : [ ] + }, + { + "id" : 6, + "name" : "Jane Smith", + "title" : "Software Eng 2", + "projects" : [ + { + "name" : "AWS Redshift security", + "started_year" : 1998 + }, + { + "name" : "AWS Hello security", + "started_year" : 2015, + "address" : [ + { + "city" : "Dallas", + "state" : "TX" + } + ] + } + ] + } + ] + } + + +Example 1: Select struct inner attribute +---------------------------------------- + +The example show fetch city (top level), city.name (second level), city.location.latitude (deeper level) struct type data from people results. + +PPL query:: + + od> source=people | fields city, city.name, city.location.latitude; + fetched rows / total rows = 1/1 + +-----------------------------------------------------+-------------+--------------------------+ + | city | city.name | city.location.latitude | + |-----------------------------------------------------+-------------+--------------------------| + | {'name': 'Seattle', 'location': {'latitude': 10.5}} | Seattle | 10.5 | + +-----------------------------------------------------+-------------+--------------------------+ + + +Example 2: Group by struct inner attribute +------------------------------------------ + +The example show group by object field inner attribute. + +PPL query:: + + od> source=people | stats count() by city.name; + fetched rows / total rows = 1/1 + +-----------+-------------+ + | count() | city.name | + |-----------+-------------| + | 1 | Seattle | + +-----------+-------------+ diff --git a/docs/user/admin/settings.rst b/docs/user/admin/settings.rst index cf64e5a685..0c0b144b9e 100644 --- a/docs/user/admin/settings.rst +++ b/docs/user/admin/settings.rst @@ -518,7 +518,7 @@ Description We are migrating existing functionalities to a new query engine under development. User can choose to enable the new engine if interested or disable if any issue found. -1. 
The default value is false. +1. The default value is true. 2. This setting is node scope. 3. This setting can be updated dynamically. @@ -532,7 +532,7 @@ SQL query:: >> curl -H 'Content-Type: application/json' -X PUT localhost:9200/_opendistro/_sql/settings -d '{ "transient" : { - "opendistro.sql.engine.new.enabled" : "true" + "opendistro.sql.engine.new.enabled" : "false" } }' @@ -546,7 +546,7 @@ Result set:: "sql" : { "engine" : { "new" : { - "enabled" : "true" + "enabled" : "false" } } } diff --git a/docs/user/beyond/partiql.rst b/docs/user/beyond/partiql.rst index adec1a64db..8954cfdd38 100644 --- a/docs/user/beyond/partiql.rst +++ b/docs/user/beyond/partiql.rst @@ -187,11 +187,12 @@ Before looking into how nested object field (tuple values) be queried, we need t | | | | | object or JSON array. | +-------------------------+---------------+-----------------------+---------------------------------------------+-------------------------+ | Selecting second level | Yes | No | Yes | | -| | | (null returned) | (or null returned if not in PartiQL syntax) | | +| | | (exception may) | (or null returned if not in PartiQL syntax) | | +| | | be thrown) | | | +-------------------------+---------------+-----------------------+---------------------------------------------+ PartiQL specification | | Selecting deeper levels | Yes | No | No | is followed | -| | | (null returned) | (exception may | | -| | | | be thrown) | | +| | | (exception may | (exception may | | +| | | be thrown) | be thrown) | | +-------------------------+---------------+-----------------------+---------------------------------------------+-------------------------+ Example 1: Selecting Top Level @@ -220,18 +221,6 @@ Selecting at deeper levels for object fields of regular value returns inner fiel | {'latitude': 10.5} | 10.5 | +--------------------+--------------------------+ -Example 3: Selecting Field of Array Value ------------------------------------------ - -Select deeper level for object fields of array value which returns ``NULL``. For example, because inner field ``accounts.id`` has three values instead of a tuple in this document, null is returned. Similarly, selecting inner field ``projects.name`` directly in nested field returns null:: - - od> SELECT accounts.id, projects.name FROM people; - fetched rows / total rows = 1/1 - +---------------+-----------------+ - | accounts.id | projects.name | - |---------------+-----------------| - | null | null | - +---------------+-----------------+ For selecting second level for nested fields, please read on and find more details in the following sections. diff --git a/docs/user/dql/functions.rst b/docs/user/dql/functions.rst index 9233084d4d..e07e354b97 100644 --- a/docs/user/dql/functions.rst +++ b/docs/user/dql/functions.rst @@ -1892,6 +1892,69 @@ Specifications: 1. 
IFNULL(ES_TYPE, ES_TYPE) -> ES_TYPE + +Usage: return parameter2 if parameter1 is null, otherwise return parameter1 + +Argument type: Any + +Return type: Any (NOTE: if the two parameters have different types, the query will fail the semantic check) + +Example One:: + + od> SELECT IFNULL(123, 321), IFNULL(321, 123) + fetched rows / total rows = 1/1 + +--------------------+--------------------+ + | IFNULL(123, 321)   | IFNULL(321, 123)   | + |--------------------+--------------------| + | 123                | 321                | + +--------------------+--------------------+ + +Example Two:: + + od> SELECT IFNULL(321, 1/0), IFNULL(1/0, 123) + fetched rows / total rows = 1/1 + +--------------------+--------------------+ + | IFNULL(321, 1/0)   | IFNULL(1/0, 123)   | + |--------------------+--------------------| + | 321                | 123                | + +--------------------+--------------------+ + +Example Three:: + + od> SELECT IFNULL(1/0, 1/0) + fetched rows / total rows = 1/1 + +--------------------+ + | IFNULL(1/0, 1/0)   | + |--------------------| + | null               | + +--------------------+ + + +NULLIF +------ + +Description +>>>>>>>>>>> + +Specifications: + +1. NULLIF(ES_TYPE, ES_TYPE) -> ES_TYPE + +Usage: return null if the two parameters are the same, otherwise return parameter1 + +Argument type: Any + +Return type: Any (NOTE: if the two parameters have different types, the query will fail the semantic check) + +Example:: + + od> SELECT NULLIF(123, 123), NULLIF(321, 123), NULLIF(1/0, 321), NULLIF(321, 1/0), NULLIF(1/0, 1/0) + fetched rows / total rows = 1/1 + +--------------------+--------------------+--------------------+--------------------+--------------------+ + | NULLIF(123, 123)   | NULLIF(321, 123)   | NULLIF(1/0, 321)   | NULLIF(321, 1/0)   | NULLIF(1/0, 1/0)   | + |--------------------+--------------------+--------------------+--------------------+--------------------| + | null               | 321                | null               | 321                | null               | + +--------------------+--------------------+--------------------+--------------------+--------------------+ + ISNULL ------ @@ -1903,6 +1966,55 @@ Specifications: 1. ISNULL(ES_TYPE) -> INTEGER +Usage: return true if the parameter is null, otherwise return false + +Argument type: Any + +Return type: boolean + +Example:: + + od> SELECT ISNULL(1/0), ISNULL(123) + fetched rows / total rows = 1/1 + +---------------+---------------+ + | ISNULL(1/0)   | ISNULL(123)   | + |---------------+---------------| + | True          | False         | + +---------------+---------------+ + +IF +------ + +Description +>>>>>>>>>>> + +Specifications: + +1. IF(condition, ES_TYPE1, ES_TYPE2) -> ES_TYPE1 or ES_TYPE2 + +Usage: if the first parameter is true, return the second parameter, otherwise return the third one. 
+ +Argument type: condition as BOOLEAN, second and third can be any type + +Return type: Any (NOTE: if parameters #2 and #3 have different types, the query will fail the semantic check) + +Example:: + + od> SELECT IF(100 > 200, '100', '200') + fetched rows / total rows = 1/1 + +-------------------------------+ + | IF(100 > 200, '100', '200')   | + |-------------------------------| + | 200                           | + +-------------------------------+ + + od> SELECT IF(200 > 100, '100', '200') + fetched rows / total rows = 1/1 + +-------------------------------+ + | IF(200 > 100, '100', '200')   | + |-------------------------------| + | 100                           | + +-------------------------------+ CASE ---- diff --git a/docs/user/general/datatypes.rst b/docs/user/general/datatypes.rst index 8ad7be0a96..768eb85064 100644 --- a/docs/user/general/datatypes.rst +++ b/docs/user/general/datatypes.rst @@ -103,9 +103,24 @@ The table below list the mapping between Elasticsearch Data Type, ODFE SQL Data | nested | array | STRUCT | +--------------------+---------------+-----------+ -Notes: Not all the ODFE SQL Type has correspond Elasticsearch Type. e.g. data and time. To use function which required such data type, user should explict convert the data type. +Notes: Not every ODFE SQL type has a corresponding Elasticsearch type, e.g. date and time. To use a function which requires such a data type, the user should explicitly convert the data type. +Undefined Data Type +=================== + +The type of a null literal is special and different from any existing one. An ``UNDEFINED`` type is used when the type cannot be inferred at "compile time" (during query parsing and analysis). The corresponding SQL type is NULL according to the JDBC specification. Because this undefined type is compatible with any other type by design, a null literal can be accepted as a valid operand or function argument. 
+ +Here are examples for NULL literal and expressions with NULL literal involved:: + + od> SELECT NULL, NULL = NULL, 1 + NULL, LENGTH(NULL); + fetched rows / total rows = 1/1 + +--------+---------------+------------+----------------+ + | NULL | NULL = NULL | 1 + NULL | LENGTH(NULL) | + |--------+---------------+------------+----------------| + | null | null | null | null | + +--------+---------------+------------+----------------+ + Numeric Data Types ================== diff --git a/docs/user/index.rst b/docs/user/index.rst index d34326d949..e4a39b560b 100644 --- a/docs/user/index.rst +++ b/docs/user/index.rst @@ -23,6 +23,8 @@ Open Distro for Elasticsearch SQL enables you to extract insights out of Elastic - `Data Types `_ + - `Values `_ + * **Data Query Language** - `Expressions `_ diff --git a/doctest/test_docs.py b/doctest/test_docs.py index d73554b615..a0f46d3658 100644 --- a/doctest/test_docs.py +++ b/doctest/test_docs.py @@ -203,7 +203,4 @@ def load_tests(loader, suite, ignore): # randomize order of tests to make sure they don't depend on each other random.shuffle(tests) - # prepend a temporary doc to enable new engine so new SQL docs followed can pass - tests.insert(0, doc_suite('../docs/user/dql/newsql.rst')) - return DocTests(tests) diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle index 00bfdfbfe4..fc499124f6 100644 --- a/elasticsearch/build.gradle +++ b/elasticsearch/build.gradle @@ -12,8 +12,8 @@ dependencies { compile project(':core') compile group: 'org.elasticsearch', name: 'elasticsearch', version: "${es_version}" compile "io.github.resilience4j:resilience4j-retry:1.5.0" - compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.4' - compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.4' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.5' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.5.1' compile group: 'org.json', name: 'json', version:'20180813' compileOnly group: 'org.elasticsearch.client', name: 'elasticsearch-rest-high-level-client', version: "${es_version}" diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java index bec7d3150d..b017839616 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java @@ -77,7 +77,7 @@ public PhysicalPlan visitRename(RenameOperator node, Object context) { */ @Override public PhysicalPlan visitTableScan(TableScanOperator node, Object context) { - return new ResourceMonitorPlan(node, resourceMonitor); + return doProtect(node); } @Override @@ -111,10 +111,14 @@ public PhysicalPlan visitHead(HeadOperator node, Object context) { ); } + /** + * Decorate input node with {@link ResourceMonitorPlan} to avoid aggregate + * window function pre-loads too many data into memory in worst case. 
+ */ @Override public PhysicalPlan visitWindow(WindowOperator node, Object context) { return new WindowOperator( - visitInput(node.getInput(), context), + doProtect(visitInput(node.getInput(), context)), node.getWindowFunction(), node.getWindowDefinition()); } @@ -124,11 +128,10 @@ public PhysicalPlan visitWindow(WindowOperator node, Object context) { */ @Override public PhysicalPlan visitSort(SortOperator node, Object context) { - return new ResourceMonitorPlan( + return doProtect( new SortOperator( visitInput(node.getInput(), context), - node.getSortList()), - resourceMonitor); + node.getSortList())); } /** @@ -155,4 +158,16 @@ PhysicalPlan visitInput(PhysicalPlan node, Object context) { return node.accept(this, context); } } + + private PhysicalPlan doProtect(PhysicalPlan node) { + if (isProtected(node)) { + return node; + } + return new ResourceMonitorPlan(node, resourceMonitor); + } + + private boolean isProtected(PhysicalPlan node) { + return (node instanceof ResourceMonitorPlan); + } + } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java index 0225a46974..112e8dd2c3 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java @@ -33,6 +33,12 @@ @RequiredArgsConstructor @EqualsAndHashCode public class ResourceMonitorPlan extends PhysicalPlan { + + /** + * How many method calls to delegate's next() to perform resource check once. + */ + public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000; + /** * Delegated PhysicalPlan. */ @@ -44,6 +50,13 @@ public class ResourceMonitorPlan extends PhysicalPlan { @ToString.Exclude private final ResourceMonitor monitor; + /** + * Count how many calls to delegate's next() already. 
+ */ + @EqualsAndHashCode.Exclude + private long nextCallCount = 0L; + + @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { return delegate.accept(visitor, context); @@ -74,6 +87,10 @@ public boolean hasNext() { @Override public ExprValue next() { + boolean shouldCheck = (++nextCallCount % NUMBER_OF_NEXT_CALL_TO_CHECK == 0); + if (shouldCheck && !this.monitor.isHealthy()) { + throw new IllegalStateException("resource is not enough to load next row, quit."); + } return delegate.next(); } } diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java index d09b043dbd..ece094d8f6 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessage.java @@ -17,6 +17,7 @@ package com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error; +import lombok.Getter; import org.elasticsearch.rest.RestStatus; import org.json.JSONObject; @@ -25,17 +26,23 @@ */ public class ErrorMessage { - protected Exception exception; + protected Throwable exception; - private int status; - private String type; - private String reason; - private String details; + private final int status; + + @Getter + private final String type; + + @Getter + private final String reason; + + @Getter + private final String details; /** * Error Message Constructor. */ - public ErrorMessage(Exception exception, int status) { + public ErrorMessage(Throwable exception, int status) { this.exception = exception; this.status = status; diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java index e0538b9cf1..73d0f924dc 100644 --- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java +++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/response/error/ErrorMessageFactory.java @@ -31,7 +31,7 @@ public class ErrorMessageFactory { * @param status exception status code * @return error message */ - public static ErrorMessage createErrorMessage(Exception e, int status) { + public static ErrorMessage createErrorMessage(Throwable e, int status) { Throwable cause = unwrapCause(e); if (cause instanceof ElasticsearchException) { ElasticsearchException exception = (ElasticsearchException) cause; diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java index 03af6f5641..0a38cc5740 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java @@ -50,6 +50,7 @@ import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AvgAggregator; import 
com.amazon.opendistroforelasticsearch.sql.expression.aggregation.NamedAggregator; import com.amazon.opendistroforelasticsearch.sql.expression.window.WindowDefinition; +import com.amazon.opendistroforelasticsearch.sql.expression.window.aggregation.AggregateWindowFunction; import com.amazon.opendistroforelasticsearch.sql.expression.window.ranking.RankFunction; import com.amazon.opendistroforelasticsearch.sql.monitor.ResourceMonitor; import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan; @@ -213,6 +214,50 @@ public void testProtectSortForWindowOperator() { windowDefinition))); } + @Test + public void testProtectWindowOperatorInput() { + NamedExpression avg = named(mock(AggregateWindowFunction.class)); + WindowDefinition windowDefinition = mock(WindowDefinition.class); + + assertEquals( + window( + resourceMonitor( + values()), + avg, + windowDefinition), + executionProtector.protect( + window( + values(), + avg, + windowDefinition))); + } + + @SuppressWarnings("unchecked") + @Test + public void testNotProtectWindowOperatorInputIfAlreadyProtected() { + NamedExpression avg = named(mock(AggregateWindowFunction.class)); + Pair sortItem = + ImmutablePair.of(DEFAULT_ASC, DSL.ref("age", INTEGER)); + WindowDefinition windowDefinition = + new WindowDefinition(emptyList(), ImmutableList.of(sortItem)); + + assertEquals( + window( + resourceMonitor( + sort( + values(emptyList()), + sortItem)), + avg, + windowDefinition), + executionProtector.protect( + window( + sort( + values(emptyList()), + sortItem), + avg, + windowDefinition))); + } + @Test public void testWithoutProtection() { Expression filterExpr = literal(ExprBooleanValue.of(true)); diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java index de2e0d157b..c1fb77fe40 100644 --- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java +++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java @@ -73,8 +73,26 @@ void openSuccess() { @Test void nextSuccess() { - monitorPlan.next(); - verify(plan, times(1)).next(); + when(resourceMonitor.isHealthy()).thenReturn(true); + + for (int i = 1; i <= 1000; i++) { + monitorPlan.next(); + } + verify(resourceMonitor, times(1)).isHealthy(); + verify(plan, times(1000)).next(); + } + + @Test + void nextExceedResourceLimit() { + when(resourceMonitor.isHealthy()).thenReturn(false); + + for (int i = 1; i < 1000; i++) { + monitorPlan.next(); + } + + IllegalStateException exception = + assertThrows(IllegalStateException.class, () -> monitorPlan.next()); + assertEquals("resource is not enough to load next row, quit.", exception.getMessage()); } @Test diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 41f5f51b9c..6bf88ebbc4 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -28,6 +28,7 @@ configurations.all { // enforce 1.1.3, https://www.whitesourcesoftware.com/vulnerability-database/WS-2019-0379 resolutionStrategy.force 'commons-codec:commons-codec:1.13' resolutionStrategy.force 'com.google.guava:guava:29.0-jre' + resolutionStrategy.force 'com.fasterxml.jackson.core:jackson-core:2.10.5' } dependencies { @@ -80,6 +81,9 @@ integTest { systemProperty "user", System.getProperty("user") systemProperty "password", 
System.getProperty("password") + // Enable new SQL engine + systemProperty 'enableNewEngine', 'false' + // Set default query size limit systemProperty 'defaultQuerySizeLimit', '10000' @@ -108,9 +112,6 @@ task integTestWithNewEngine(type: RestIntegTestTask) { systemProperty "user", System.getProperty("user") systemProperty "password", System.getProperty("password") - // Enable new SQL engine - systemProperty 'enableNewEngine', 'true' - // Set default query size limit systemProperty 'defaultQuerySizeLimit', '10000' diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java index a437179f98..9d510d42e8 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/ObjectFieldSelectIT.java @@ -25,6 +25,7 @@ import com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils; import org.json.JSONArray; import org.json.JSONObject; +import org.junit.Ignore; import org.junit.Test; /** @@ -94,6 +95,8 @@ public void testSelectNestedFieldItself() { ); } + @Ignore("Issue 1017 track the multiple values for a field. Actually we should not expected " + + "return a array for object field") @Test public void testSelectObjectFieldOfArrayValuesItself() { JSONObject response = new JSONObject(query("SELECT accounts FROM %s")); diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/OrdinalAliasRewriterIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/OrdinalAliasRewriterIT.java index 3beb740a7e..286798b3d1 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/OrdinalAliasRewriterIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/OrdinalAliasRewriterIT.java @@ -21,7 +21,6 @@ import com.amazon.opendistroforelasticsearch.sql.legacy.utils.StringUtils; import java.io.IOException; import org.junit.Assume; -import org.junit.Ignore; import org.junit.Test; public class OrdinalAliasRewriterIT extends SQLIntegTestCase { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java index 7ade383945..bbe47c946f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/PrettyFormatResponseIT.java @@ -136,6 +136,8 @@ public void selectWrongField() throws IOException { assertThat(getDataRows(response).length(), equalTo(RESPONSE_DEFAULT_MAX_SIZE)); } + @Ignore("Issue 1019, Breaking Change, the keyword should not been allowed, semantic exception " + + "is expected") @Test public void selectKeyword() throws IOException { JSONObject response = executeQuery( diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java index 85c785a5f3..fc9088a24a 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLFunctionsIT.java @@ -714,12 +714,13 @@ public void 
right() throws IOException { @Test public void ifFuncShouldPassJDBC() { + Assume.assumeTrue(isNewQueryEngineEabled()); JSONObject response = executeJdbcRequest( "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + TEST_INDEX_ACCOUNT + " WHERE age IS NOT NULL GROUP BY Ages"); - assertEquals("Ages", response.query("/schema/0/name")); + assertEquals("IF(age > 30, \'True\', \'False\')", response.query("/schema/0/name")); assertEquals("Ages", response.query("/schema/0/alias")); - assertEquals("double", response.query("/schema/0/type")); + assertEquals("keyword", response.query("/schema/0/type")); } @Test @@ -752,12 +753,13 @@ public void ifFuncWithNullInputAsConditionTest() throws IOException { @Test public void ifnullShouldPassJDBC() throws IOException { + Assume.assumeTrue(isNewQueryEngineEabled()); JSONObject response = executeJdbcRequest( "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name"); - assertEquals("name", response.query("/schema/0/name")); + assertEquals("IFNULL(lastname, \'unknown\')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); - assertEquals("double", response.query("/schema/0/type")); + assertEquals("keyword", response.query("/schema/0/type")); } @Test @@ -782,12 +784,13 @@ public void ifnullWithNullInputTest() throws IOException { @Test public void isnullShouldPassJDBC() { + Assume.assumeTrue(isNewQueryEngineEabled()); JSONObject response = executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name"); - assertEquals("name", response.query("/schema/0/name")); + "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); - assertEquals("integer", response.query("/schema/0/type")); + assertEquals("boolean", response.query("/schema/0/type")); } @Test diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java index 9f59d93da2..0732193b64 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/legacy/SQLIntegTestCase.java @@ -87,7 +87,7 @@ public void setUpIndices() throws Exception { initClient(); } - enableNewQueryEngine(); + configureNewQueryEngine(); resetQuerySizeLimit(); init(); } @@ -147,15 +147,15 @@ public static void cleanUpIndices() throws IOException { wipeAllClusterSettings(); } - private void enableNewQueryEngine() throws IOException { + private void configureNewQueryEngine() throws IOException { boolean isEnabled = isNewQueryEngineEabled(); - if (isEnabled) { - com.amazon.opendistroforelasticsearch.sql.util.TestUtils.enableNewQueryEngine(client()); + if (!isEnabled) { + com.amazon.opendistroforelasticsearch.sql.util.TestUtils.disableNewQueryEngine(client()); } } protected boolean isNewQueryEngineEabled() { - return Boolean.parseBoolean(System.getProperty("enableNewEngine", "false")); + return Boolean.parseBoolean(System.getProperty("enableNewEngine", "true")); } protected void setQuerySizeLimit(Integer limit) throws IOException { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DateTimeFunctionIT.java index 
a68780300d..70861919bb 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/DateTimeFunctionIT.java @@ -217,20 +217,44 @@ public void testMicrosecond() throws IOException { verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); + // Explicit timestamp value with less than 6 microsecond digits + result = executeQuery(String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "integer")); + verifySome(result.getJSONArray("datarows"), rows(123400)); + result = executeQuery(String.format( "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); + // Explicit time value with less than 6 microsecond digits + result = executeQuery(String.format( + "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "integer")); + verifySome(result.getJSONArray("datarows"), rows(123400)); + result = executeQuery(String.format( "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); + // Implicit timestamp value with less than 6 microsecond digits + result = executeQuery(String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "integer")); + verifySome(result.getJSONArray("datarows"), rows(123400)); + result = executeQuery(String.format( "source=%s | eval f = microsecond('17:30:00.000010') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); + + // Implicit time value with less than 6 microsecond digits + result = executeQuery(String.format( + "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "integer")); + verifySome(result.getJSONArray("datarows"), rows(123400)); } @Test diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ObjectFieldOperateIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ObjectFieldOperateIT.java new file mode 100644 index 0000000000..12d7eb71ec --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/ObjectFieldOperateIT.java @@ -0,0 +1,90 @@ +/* + * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.ppl; + +import static com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase.Index.DEEP_NESTED; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_DEEP_NESTED; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; + +public class ObjectFieldOperateIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(DEEP_NESTED); + } + + @Test + public void select_object_field() throws IOException { + JSONObject result = executeQuery( + String.format("source=%s | " + + "fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, + schema("city.name", "string"), + schema("city.location.latitude", "double")); + verifyDataRows(result, + rows("Seattle", 10.5)); + } + + @Test + public void compare_object_field_in_where() throws IOException { + JSONObject result = executeQuery( + String.format("source=%s " + + "| where city.name = 'Seattle' " + + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, + schema("city.name", "string"), + schema("city.location.latitude", "double")); + verifyDataRows(result, + rows("Seattle", 10.5)); + } + + @Test + public void group_object_field_in_stats() throws IOException { + JSONObject result = executeQuery( + String.format("source=%s " + + "| stats count() by city.name", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, + schema("count()", "integer"), + schema("city.name", "string")); + verifyDataRows(result, + rows(1, "Seattle")); + } + + @Test + public void sort_by_object_field() throws IOException { + JSONObject result = executeQuery( + String.format("source=%s " + + "| sort city.name" + + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, + schema("city.name", "string"), + schema("city.location.latitude", "double")); + verifyDataRows(result, + rows("Seattle", 10.5)); + } +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/AdminIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/AdminIT.java index 2b7101c2c7..0a21506621 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/AdminIT.java @@ -23,7 +23,6 @@ import com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils; import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; import com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import com.google.common.io.Resources; import java.io.IOException; import java.net.URI; @@ -39,7 +38,6 @@ public class AdminIT extends SQLIntegTestCase { @Override public void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); loadIndex(Index.ACCOUNT); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ConditionalIT.java 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ConditionalIT.java new file mode 100644 index 0000000000..e6ae7606c5 --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ConditionalIT.java @@ -0,0 +1,235 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + * + */ + +package com.amazon.opendistroforelasticsearch.sql.sql; + +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_FALSE; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_NULL; +import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.LITERAL_TRUE; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.hitAny; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.kvInt; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; +import static org.hamcrest.Matchers.equalTo; + +import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; +import java.io.IOException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.SearchHits; +import org.json.JSONObject; +import org.junit.Test; + +public class ConditionalIT extends SQLIntegTestCase { + + @Override + public void init() throws Exception { + super.init(); + loadIndex(Index.ACCOUNT); + loadIndex(Index.BANK_WITH_NULL_VALUES); + } + + @Test + public void ifnullShouldPassJDBC() throws IOException { + JSONObject response = executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT + + " GROUP BY name"); + assertEquals("IFNULL(lastname, \'unknown\')", response.query("/schema/0/name")); + assertEquals("name", response.query("/schema/0/alias")); + assertEquals("keyword", response.query("/schema/0/type")); + } + + @Test + public void ifnullWithNullInputTest() { + JSONObject response = new JSONObject(executeQuery( + "SELECT IFNULL(null, firstname) as IFNULL1 ," + + " IFNULL(firstname, null) as IFNULL2 ," + + " IFNULL(null, null) as IFNULL3 " + + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", "jdbc")); + + verifySchema(response, + 
schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), + schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), + schema("IFNULL(null, null)", "IFNULL3", "byte")); + verifyDataRows(response, + rows("Hattie", "Hattie", LITERAL_NULL.value()), + rows( "Elinor", "Elinor", LITERAL_NULL.value()) + ); + } + + @Test + public void ifnullWithMissingInputTest() { + JSONObject response = new JSONObject(executeQuery( + "SELECT IFNULL(balance, 100) as IFNULL1, " + + " IFNULL(200, balance) as IFNULL2, " + + " IFNULL(balance, balance) as IFNULL3 " + + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 3", "jdbc")); + verifySchema(response, + schema("IFNULL(balance, 100)", "IFNULL1", "long"), + schema("IFNULL(200, balance)", "IFNULL2", "long"), + schema("IFNULL(balance, balance)", "IFNULL3", "long")); + verifyDataRows(response, + rows(100, 200, null), + rows(100, 200, null), + rows(100, 200, null) + ); + } + + @Test + public void nullifShouldPassJDBC() throws IOException { + JSONObject response = executeJdbcRequest( + "SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); + assertEquals("NULLIF(lastname, \'unknown\')", response.query("/schema/0/name")); + assertEquals("name", response.query("/schema/0/alias")); + assertEquals("keyword", response.query("/schema/0/type")); + } + + @Test + public void nullifWithNotNullInputTestOne(){ + JSONObject response = new JSONObject(executeQuery( + "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " + + "FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", "jdbc")); + verifySchema(response, + schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); + verifyDataRows(response, + rows(LITERAL_NULL.value()), + rows("Hattie") + ); + } + + @Test + public void nullifWithNullInputTest() { + JSONObject response = new JSONObject(executeQuery( + "SELECT NULLIF(1/0, 123) as nullif1 ," + + " NULLIF(123, 1/0) as nullif2 ," + + " NULLIF(1/0, 1/0) as nullif3 " + + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 1", "jdbc")); + verifySchema(response, + schema("NULLIF(1/0, 123)", "nullif1", "integer"), + schema("NULLIF(123, 1/0)", "nullif2", "integer"), + schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); + verifyDataRows(response, + rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value() + ) + ); + } + + @Test + public void isnullShouldPassJDBC() throws IOException { + JSONObject response = executeJdbcRequest( + "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); + assertEquals("name", response.query("/schema/0/alias")); + assertEquals("boolean", response.query("/schema/0/type")); + } + + @Test + public void isnullWithNotNullInputTest() throws IOException { + assertThat( + executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0))) + ); + assertThat( + executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0))) + ); + } + + @Test + public void isnullWithNullInputTest() { + JSONObject response = new JSONObject(executeQuery( + "SELECT ISNULL(1/0) as ISNULL1 ," + + " ISNULL(firstname) as ISNULL2 " + + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", "jdbc")); + verifySchema(response, + schema("ISNULL(1/0)", "ISNULL1", "boolean"), + schema("ISNULL(firstname)", "ISNULL2", "boolean")); + verifyDataRows(response, + rows(LITERAL_TRUE.value(), 
LITERAL_FALSE.value()), + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()) + ); + } + + @Test + public void isnullWithMathExpr() throws IOException{ + assertThat( + executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0))) + ); + assertThat( + executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(1))) + ); + } + + @Test + public void ifShouldPassJDBC() throws IOException { + JSONObject response = executeJdbcRequest( + "SELECT IF(2 > 0, \'hello\', \'world\') AS name FROM " + TEST_INDEX_ACCOUNT); + assertEquals("IF(2 > 0, \'hello\', \'world\')", response.query("/schema/0/name")); + assertEquals("name", response.query("/schema/0/alias")); + assertEquals("keyword", response.query("/schema/0/type")); + } + + @Test + public void ifWithTrueAndFalseCondition() throws IOException { + JSONObject response = new JSONObject(executeQuery( + "SELECT IF(2 < 0, firstname, lastname) as IF0, " + + " IF(2 > 0, firstname, lastname) as IF1, " + + " firstname as IF2, " + + " lastname as IF3 " + + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", "jdbc" )); + verifySchema(response, + schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), + schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), + schema("firstname", "IF2", "text"), + schema("lastname", "IF3", "keyword") + ); + verifyDataRows(response, + rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), + rows("Bond", "Hattie", "Hattie", "Bond") + ); + + } + + private SearchHits query(String query) throws IOException { + final String rsp = executeQueryWithStringOutput(query); + + final XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + rsp); + return SearchResponse.fromXContent(parser).getHits(); + } + + private JSONObject executeJdbcRequest(String query) { + return new JSONObject(executeQuery(query, "jdbc")); + } +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/DateTimeFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/DateTimeFunctionIT.java index 8edd2b6c9a..418fecbd35 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/DateTimeFunctionIT.java @@ -15,18 +15,16 @@ package com.amazon.opendistroforelasticsearch.sql.sql; +import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; import static com.amazon.opendistroforelasticsearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; -import static com.amazon.opendistroforelasticsearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.getResponseBody; -import com.amazon.opendistroforelasticsearch.sql.ast.expression.In; import com.amazon.opendistroforelasticsearch.sql.common.utils.StringUtils; import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; -import 
com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.IOException; import java.util.Locale; import org.elasticsearch.client.Request; @@ -40,7 +38,6 @@ public class DateTimeFunctionIT extends SQLIntegTestCase { @Override public void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); loadIndex(Index.BANK); } @@ -251,17 +248,38 @@ public void testMicrosecond() throws IOException { schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); verifyDataRows(result, rows(123456)); + // Explicit timestamp value with less than 6 microsecond digits + result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.1234'))"); + verifySchema(result, + schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); + verifyDataRows(result, rows(123400)); + result = executeQuery("select microsecond(time('17:30:00.000010'))"); verifySchema(result, schema("microsecond(time('17:30:00.000010'))", null, "integer")); verifyDataRows(result, rows(10)); + // Explicit time value with less than 6 microsecond digits + result = executeQuery("select microsecond(time('17:30:00.1234'))"); + verifySchema(result, schema("microsecond(time('17:30:00.1234'))", null, "integer")); + verifyDataRows(result, rows(123400)); + result = executeQuery("select microsecond('2020-09-16 17:30:00.123456')"); verifySchema(result, schema("microsecond('2020-09-16 17:30:00.123456')", null, "integer")); verifyDataRows(result, rows(123456)); + // Implicit timestamp value with less than 6 microsecond digits + result = executeQuery("select microsecond('2020-09-16 17:30:00.1234')"); + verifySchema(result, schema("microsecond('2020-09-16 17:30:00.1234')", null, "integer")); + verifyDataRows(result, rows(123400)); + result = executeQuery("select microsecond('17:30:00.000010')"); verifySchema(result, schema("microsecond('17:30:00.000010')", null, "integer")); verifyDataRows(result, rows(10)); + + // Implicit time value with less than 6 microsecond digits + result = executeQuery("select microsecond('17:30:00.1234')"); + verifySchema(result, schema("microsecond('17:30:00.1234')", null, "integer")); + verifyDataRows(result, rows(123400)); } @Test diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java index 754a274f50..f99c0f2c8f 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/ExpressionIT.java @@ -21,7 +21,6 @@ import static org.hamcrest.Matchers.is; import com.amazon.opendistroforelasticsearch.sql.legacy.RestIntegTestCase; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.IOException; import java.util.Locale; import java.util.function.Function; @@ -31,7 +30,6 @@ import org.elasticsearch.client.ResponseException; import org.junit.Ignore; import org.junit.Rule; -import org.junit.Test; import org.junit.rules.ExpectedException; /** @@ -48,7 +46,6 @@ public class ExpressionIT extends RestIntegTestCase { @Override protected void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); } public ResponseExceptionAssertion expectResponseException() { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java 
b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java index e511921003..9f235dfc7a 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MathematicalFunctionIT.java @@ -24,7 +24,6 @@ import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.getResponseBody; import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.IOException; import java.util.Locale; import org.elasticsearch.client.Request; @@ -38,7 +37,6 @@ public class MathematicalFunctionIT extends SQLIntegTestCase { @Override public void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); loadIndex(Index.BANK); } diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MetricsIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MetricsIT.java index 473fe4402b..81ddf5549c 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MetricsIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/MetricsIT.java @@ -20,7 +20,6 @@ import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; import com.amazon.opendistroforelasticsearch.sql.legacy.metrics.MetricName; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -38,7 +37,6 @@ public class MetricsIT extends SQLIntegTestCase { @Override protected void init() throws Exception { loadIndex(Index.BANK); - TestUtils.enableNewQueryEngine(client()); } @Test diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java new file mode 100644 index 0000000000..a21fc286ac --- /dev/null +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java @@ -0,0 +1,71 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ * + */ + +package com.amazon.opendistroforelasticsearch.sql.sql; + +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.rows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.schema; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifyDataRows; +import static com.amazon.opendistroforelasticsearch.sql.util.MatcherUtils.verifySchema; + +import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; +import org.json.JSONObject; +import org.junit.Test; + +/** + * This manual IT for NULL literal cannot be replaced with comparison test because other database + * has different type for expression with NULL involved, such as NULL rather than concrete type + * inferred like what we do in core engine. + */ +public class NullLiteralIT extends SQLIntegTestCase { + + @Test + public void testNullLiteralSchema() { + verifySchema( + query("SELECT NULL, ABS(NULL), 1 + NULL, NULL + 1.0"), + schema("NULL", "undefined"), + schema("ABS(NULL)", "byte"), + schema("1 + NULL", "integer"), + schema("NULL + 1.0", "double")); + } + + @Test + public void testNullLiteralInOperator() { + verifyDataRows( + query("SELECT NULL = NULL, NULL AND TRUE"), + rows(null, null)); + } + + @Test + public void testNullLiteralInFunction() { + verifyDataRows( + query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), + rows(null, null)); + } + + @Test + public void testNullLiteralInInterval() { + verifyDataRows( + query("SELECT INTERVAL NULL DAY, INTERVAL 60 * 60 * 24 * (NULL - FLOOR(NULL)) SECOND"), + rows(null, null) + ); + } + + private JSONObject query(String sql) { + return new JSONObject(executeQuery(sql, "jdbc")); + } + +} diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java index e069141d4b..da25c4ed28 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/SQLCorrectnessIT.java @@ -16,7 +16,6 @@ package com.amazon.opendistroforelasticsearch.sql.sql; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import com.google.common.io.Resources; import java.io.IOException; import java.nio.file.Files; @@ -37,7 +36,6 @@ public class SQLCorrectnessIT extends CorrectnessTestBase { @Override protected void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); } @Test diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/TextFunctionIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/TextFunctionIT.java index a096b5b201..37210d5d09 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/TextFunctionIT.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/TextFunctionIT.java @@ -23,7 +23,6 @@ import static com.amazon.opendistroforelasticsearch.sql.util.TestUtils.getResponseBody; import com.amazon.opendistroforelasticsearch.sql.legacy.SQLIntegTestCase; -import com.amazon.opendistroforelasticsearch.sql.util.TestUtils; import java.io.IOException; import java.util.Locale; import org.elasticsearch.client.Request; @@ -37,7 +36,6 @@ public class TextFunctionIT extends SQLIntegTestCase { @Override public void init() throws Exception { super.init(); - TestUtils.enableNewQueryEngine(client()); } void verifyQuery(String query, String type, String 
output) throws IOException { diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/TestUtils.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/TestUtils.java index f68dd74456..b38f5a1efa 100644 --- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/util/TestUtils.java @@ -853,11 +853,11 @@ public static List> getPermutations(final List items) { } /** - * Enable new query engine which is disabled by default for now. + * Disable new query engine which is enabled by default. */ - public static void enableNewQueryEngine(RestClient client) throws IOException { + public static void disableNewQueryEngine(RestClient client) throws IOException { Request request = new Request("PUT", SETTINGS_API_ENDPOINT); - request.setJsonEntity("{\"transient\" : {\"opendistro.sql.engine.new.enabled\" : \"true\"}}"); + request.setJsonEntity("{\"transient\" : {\"opendistro.sql.engine.new.enabled\" : \"false\"}}"); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); diff --git a/integ-test/src/test/resources/correctness/expressions/conditionals.txt b/integ-test/src/test/resources/correctness/expressions/conditionals.txt index 18ded14bfa..76c3562a01 100644 --- a/integ-test/src/test/resources/correctness/expressions/conditionals.txt +++ b/integ-test/src/test/resources/correctness/expressions/conditionals.txt @@ -13,3 +13,6 @@ CASE WHEN 'test' = 'hello world' THEN 'Hello' WHEN 1.0 = 2.0 THEN 'One' ELSE TRI CASE 1 WHEN 1 THEN 'One' ELSE NULL END AS cases CASE 1 WHEN 1 THEN 'One' WHEN 2 THEN 'Two' ELSE NULL END AS cases CASE 2 WHEN 1 THEN NULL WHEN 2 THEN 'Two' ELSE NULL END AS cases +IFNULL(null, AvgTicketPrice) from kibana_sample_data_flights +IFNULL(AvgTicketPrice, 100) from kibana_sample_data_flights +IFNULL(AvgTicketPrice, null) from kibana_sample_data_flights \ No newline at end of file diff --git a/integ-test/src/test/resources/correctness/expressions/literals.txt b/integ-test/src/test/resources/correctness/expressions/literals.txt index c56aa9ef05..b70fa41e83 100644 --- a/integ-test/src/test/resources/correctness/expressions/literals.txt +++ b/integ-test/src/test/resources/correctness/expressions/literals.txt @@ -1,3 +1,5 @@ +null +NULL 1 true -4.567 diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/SqlSettings.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/SqlSettings.java index 7bb9fe7261..346b4b8423 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/SqlSettings.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/plugin/SqlSettings.java @@ -57,7 +57,7 @@ public class SqlSettings { public SqlSettings() { Map> settings = new HashMap<>(); settings.put(SQL_ENABLED, Setting.boolSetting(SQL_ENABLED, true, NodeScope, Dynamic)); - settings.put(SQL_NEW_ENGINE_ENABLED, Setting.boolSetting(SQL_NEW_ENGINE_ENABLED, false, NodeScope, Dynamic)); + settings.put(SQL_NEW_ENGINE_ENABLED, Setting.boolSetting(SQL_NEW_ENGINE_ENABLED, true, NodeScope, Dynamic)); settings.put(QUERY_SLOWLOG, Setting.intSetting(QUERY_SLOWLOG, 2, NodeScope, Dynamic)); settings.put(QUERY_RESPONSE_FORMAT, Setting.simpleString(QUERY_RESPONSE_FORMAT, Format.JDBC.getFormatName(), NodeScope, Dynamic)); diff --git 
a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java index b23798274b..04a351edfa 100644 --- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java +++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java @@ -47,7 +47,7 @@ public static String anonymizeData(String query) { .replaceAll("false", "boolean_literal") .replaceAll("[\\n][\\t]+", " "); } catch (Exception e) { - LOG.error("Caught an exception when removing sensitive data", e); + LOG.warn("Caught an exception when anonymizing sensitive data"); resultQuery = query; } return resultQuery; diff --git a/plugin/build.gradle b/plugin/build.gradle index 1fb0bf9cfc..2ef6a9de6c 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -15,7 +15,7 @@ repositories { } esplugin { - name 'opendistro_sql' + name 'opendistro-sql' description 'Open Distro for Elasticsearch SQL' classname 'com.amazon.opendistroforelasticsearch.sql.plugin.SQLPlugin' licenseFile rootProject.file("LICENSE.txt") @@ -31,6 +31,7 @@ thirdPartyAudit.enabled = false configurations.all { // conflict with spring-jcl exclude group: "commons-logging", module: "commons-logging" + resolutionStrategy.force 'com.fasterxml.jackson.core:jackson-core:2.10.5' // enforce 1.1.3, https://www.whitesourcesoftware.com/vulnerability-database/WS-2019-0379 resolutionStrategy.force 'commons-codec:commons-codec:1.13' resolutionStrategy.force 'com.google.guava:guava:29.0-jre' @@ -120,13 +121,27 @@ afterEvaluate { buildRpm { arch = 'NOARCH' - archiveName "${packageName}-${version}.rpm" dependsOn 'assemble' + finalizedBy 'renameRpm' + task renameRpm(type: Copy) { + from("$buildDir/distributions") + into("$buildDir/distributions") + include archiveName + rename archiveName, "${packageName}-${version}.rpm" + doLast { delete file("$buildDir/distributions/$archiveName") } + } } buildDeb { - arch = 'amd64' - archiveName "${packageName}-${version}.deb" + arch = 'all' dependsOn 'assemble' + finalizedBy 'renameDeb' + task renameDeb(type: Copy) { + from("$buildDir/distributions") + into("$buildDir/distributions") + include archiveName + rename archiveName, "${packageName}-${version}.deb" + doLast { delete file("$buildDir/distributions/$archiveName") } + } } } diff --git a/ppl/src/main/antlr/OpenDistroPPLLexer.g4 b/ppl/src/main/antlr/OpenDistroPPLLexer.g4 index 0869f15911..9866085710 100644 --- a/ppl/src/main/antlr/OpenDistroPPLLexer.g4 +++ b/ppl/src/main/antlr/OpenDistroPPLLexer.g4 @@ -235,6 +235,12 @@ LIKE: 'LIKE'; ISNULL: 'ISNULL'; ISNOTNULL: 'ISNOTNULL'; +// FLOWCONTROL FUNCTIONS +IFNULL: 'IFNULL'; +NULLIF: 'NULLIF'; +IF: 'IF'; + + // LITERALS AND VALUES //STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING; ID: ID_LITERAL; diff --git a/ppl/src/main/antlr/OpenDistroPPLParser.g4 b/ppl/src/main/antlr/OpenDistroPPLParser.g4 index b23bcdd062..a4fe7f0938 100644 --- a/ppl/src/main/antlr/OpenDistroPPLParser.g4 +++ b/ppl/src/main/antlr/OpenDistroPPLParser.g4 @@ -42,7 +42,7 @@ whereCommand ; fieldsCommand - : FIELDS (PLUS | MINUS)? wcFieldList + : FIELDS (PLUS | MINUS)? 
fieldList ; renameCommand @@ -254,7 +254,7 @@ dateAndTimeFunctionBase /** condition function return boolean value */ conditionFunctionBase : LIKE - | ISNULL | ISNOTNULL + | IF | ISNULL | ISNOTNULL | IFNULL | NULLIF ; textFunctionBase @@ -312,11 +312,11 @@ valueList ; qualifiedName - : ident #identsAsQualifiedName + : ident (DOT ident)* #identsAsQualifiedName ; wcQualifiedName - : wildcard #identsAsWildcardQualifiedName + : wildcard (DOT wildcard)* #identsAsWildcardQualifiedName ; ident diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilder.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilder.java index 8a8c011190..6e56c80509 100644 --- a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilder.java +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilder.java @@ -119,8 +119,8 @@ public UnresolvedPlan visitWhereCommand(WhereCommandContext ctx) { @Override public UnresolvedPlan visitFieldsCommand(FieldsCommandContext ctx) { return new Project( - ctx.wcFieldList() - .wcFieldExpression() + ctx.fieldList() + .fieldExpression() .stream() .map(this::visitExpression) .collect(Collectors.toList()), diff --git a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java index 0cb16a4121..4d1bf4a23c 100644 --- a/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstExpressionBuilder.java @@ -15,6 +15,7 @@ package com.amazon.opendistroforelasticsearch.sql.ppl.parser; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.qualifiedName; import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.IS_NOT_NULL; import static com.amazon.opendistroforelasticsearch.sql.expression.function.BuiltinFunctionName.IS_NULL; import static com.amazon.opendistroforelasticsearch.sql.ppl.antlr.parser.OpenDistroPPLParser.BinaryArithmeticContext; @@ -67,9 +68,11 @@ import com.google.common.collect.ImmutableMap; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RuleContext; /** * Class of building AST Expression nodes. 
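The two grammar hunks above change `qualifiedName` and `wcQualifiedName` from a single `ident`/`wildcard` to a dot-separated sequence, and the `visitIdentifiers` rewrite in the next hunk folds those parts into one `QualifiedName` node, which is what lets PPL address object fields such as `city.location.latitude`. A rough illustration of the idea only: the real builder walks ANTLR contexts and reuses the plugin's `QualifiedName` and `StringUtils.unquoteIdentifier`, whereas the hypothetical stand-in below simply splits on dots (which backquoted identifiers containing dots would defeat).

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical stand-in for building a multi-part qualified name; illustration only.
public class QualifiedNameSketch {

  // Strip one pair of surrounding backquotes, similar to identifier unquoting.
  private static String unquote(String ident) {
    if (ident.length() >= 2 && ident.startsWith("`") && ident.endsWith("`")) {
      return ident.substring(1, ident.length() - 1);
    }
    return ident;
  }

  // Split a dotted identifier such as city.location.latitude into its parts.
  public static List<String> parts(String dottedIdent) {
    return Arrays.stream(dottedIdent.split("\\."))
        .map(QualifiedNameSketch::unquote)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    System.out.println(parts("city.location.latitude")); // [city, location, latitude]
    System.out.println(parts("`city`.`name`"));          // [city, name]
  }
}
```

Keeping the segments separate, rather than one flat string, is what allows each part to be resolved against the index mapping, which the new ObjectFieldOperateIT cases exercise.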
@@ -171,7 +174,7 @@ public UnresolvedExpression visitWcFieldExpression(WcFieldExpressionContext ctx) @Override public UnresolvedExpression visitSortField(SortFieldContext ctx) { return new Field( - ctx.sortFieldExpression().fieldExpression().getText(), + qualifiedName(ctx.sortFieldExpression().fieldExpression().getText()), ArgumentFactory.getArgumentList(ctx) ); } @@ -227,7 +230,7 @@ public UnresolvedExpression visitEvalFunctionCall(EvalFunctionCallContext ctx) { @Override public UnresolvedExpression visitTableSource(TableSourceContext ctx) { - return visitIdentifier(ctx); + return visitIdentifiers(Arrays.asList(ctx)); } /** @@ -235,13 +238,13 @@ public UnresolvedExpression visitTableSource(TableSourceContext ctx) { */ @Override public UnresolvedExpression visitIdentsAsQualifiedName(IdentsAsQualifiedNameContext ctx) { - return visitIdentifier(ctx.ident()); + return visitIdentifiers(ctx.ident()); } @Override public UnresolvedExpression visitIdentsAsWildcardQualifiedName( IdentsAsWildcardQualifiedNameContext ctx) { - return visitIdentifier(ctx.wildcard()); + return visitIdentifiers(ctx.wildcard()); } @Override @@ -270,9 +273,13 @@ public UnresolvedExpression visitBooleanLiteral(BooleanLiteralContext ctx) { return new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN); } - private UnresolvedExpression visitIdentifier(ParserRuleContext ctx) { - return new QualifiedName(Collections.singletonList( - StringUtils.unquoteIdentifier(ctx.getText()))); + private QualifiedName visitIdentifiers(List ctx) { + return new QualifiedName( + ctx.stream() + .map(RuleContext::getText) + .map(StringUtils::unquoteIdentifier) + .collect(Collectors.toList()) + ); } } diff --git a/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilderTest.java b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilderTest.java index 11b035153a..82bd27aa0e 100644 --- a/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilderTest.java +++ b/ppl/src/test/java/com/amazon/opendistroforelasticsearch/sql/ppl/parser/AstBuilderTest.java @@ -38,6 +38,7 @@ import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.map; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.nullLiteral; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.projectWithArg; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.qualifiedName; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.rareTopN; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.relation; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.rename; @@ -112,6 +113,16 @@ public void testWhereCommand() { ); } + @Test + public void testWhereCommandWithQualifiedName() { + assertEqual("search source=t | where a.v=1", + filter( + relation("t"), + compare("=", field(qualifiedName("a", "v")), intLiteral(1)) + ) + ); + } + @Test public void testFieldsCommandWithoutArguments() { assertEqual("source=t | fields f, g", @@ -142,6 +153,16 @@ public void testFieldsCommandWithExcludeArguments() { )); } + @Test + public void testSearchCommandWithQualifiedName() { + assertEqual("source=t | fields f.v, g.v", + projectWithArg( + relation("t"), + defaultFieldsArgs(), + field(qualifiedName("f", "v")), field(qualifiedName("g", "v")) + )); + } + @Test public void testRenameCommand() { assertEqual("source=t | rename f as g", diff --git a/protocol/build.gradle b/protocol/build.gradle index fb311bcc4b..8259d13217 
100644 --- a/protocol/build.gradle +++ b/protocol/build.gradle @@ -11,8 +11,8 @@ repositories { dependencies { // https://github.com/google/guava/wiki/CVE-2018-10237 compile group: 'com.google.guava', name: 'guava', version: '29.0-jre' - compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.4' - compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.4' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: '2.10.5' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.5.1' implementation 'com.google.code.gson:gson:2.8.6' compile project(':core') compile project(':elasticsearch') diff --git a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatter.java b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatter.java index b293e2db77..52f8c1d703 100644 --- a/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatter.java +++ b/protocol/src/main/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatter.java @@ -18,6 +18,8 @@ import com.amazon.opendistroforelasticsearch.sql.common.antlr.SyntaxCheckException; import com.amazon.opendistroforelasticsearch.sql.data.type.ExprType; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error.ErrorMessage; +import com.amazon.opendistroforelasticsearch.sql.elasticsearch.response.error.ErrorMessageFactory; import com.amazon.opendistroforelasticsearch.sql.exception.QueryEngineException; import com.amazon.opendistroforelasticsearch.sql.executor.ExecutionEngine.Schema; import com.amazon.opendistroforelasticsearch.sql.protocol.response.QueryResult; @@ -56,11 +58,13 @@ protected Object buildJsonObject(QueryResult response) { @Override public String format(Throwable t) { + int status = getStatus(t); + ErrorMessage message = ErrorMessageFactory.createErrorMessage(t, status); Error error = new Error( - t.getClass().getSimpleName(), - t.getMessage(), - t.getMessage()); - return jsonify(new JdbcErrorResponse(error, getStatus(t))); + message.getType(), + message.getReason(), + message.getDetails()); + return jsonify(new JdbcErrorResponse(error, status)); } private Column fetchColumn(Schema.Column col) { diff --git a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatterTest.java b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatterTest.java index 2705b31d50..e421016c00 100644 --- a/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatterTest.java +++ b/protocol/src/test/java/com/amazon/opendistroforelasticsearch/sql/protocol/response/format/JdbcResponseFormatterTest.java @@ -39,6 +39,7 @@ import com.google.common.collect.ImmutableMap; import com.google.gson.JsonParser; import java.util.Arrays; +import org.elasticsearch.ElasticsearchException; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; @@ -118,7 +119,7 @@ void format_client_error_response_due_to_syntax_exception() { "{\"error\":" + "{\"" + "type\":\"SyntaxCheckException\"," - + "\"reason\":\"Invalid query syntax\"," + + "\"reason\":\"Invalid Query\"," + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", @@ 
-132,7 +133,7 @@ void format_client_error_response_due_to_semantic_exception() { "{\"error\":" + "{\"" + "type\":\"SemanticCheckException\"," - + "\"reason\":\"Invalid query semantics\"," + + "\"reason\":\"Invalid Query\"," + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", @@ -146,7 +147,7 @@ void format_server_error_response() { "{\"error\":" + "{\"" + "type\":\"IllegalStateException\"," - + "\"reason\":\"Execution error\"," + + "\"reason\":\"There was internal problem at backend\"," + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", @@ -154,6 +155,25 @@ void format_server_error_response() { ); } + @Test + void format_server_error_response_due_to_elasticsearch() { + assertJsonEquals( + "{\"error\":" + + "{\"" + + "type\":\"ElasticsearchException\"," + + "\"reason\":\"Error occurred in Elasticsearch engine: all shards failed\"," + + "\"details\":\"ElasticsearchException[all shards failed]; " + + "nested: IllegalStateException[Execution error];; " + + "java.lang.IllegalStateException: Execution error\\n" + + "For more details, please send request for Json format to see the raw response " + + "from elasticsearch engine.\"" + + "}," + + "\"status\":503}", + formatter.format(new ElasticsearchException("all shards failed", + new IllegalStateException("Execution error"))) + ); + } + private static void assertJsonEquals(String expected, String actual) { assertEquals( JsonParser.parseString(expected), diff --git a/sql-jdbc/build.gradle b/sql-jdbc/build.gradle index 7457c9ef71..a72c310804 100644 --- a/sql-jdbc/build.gradle +++ b/sql-jdbc/build.gradle @@ -52,7 +52,7 @@ repositories { dependencies { implementation group: 'org.apache.httpcomponents', name: 'httpclient', version: '4.5.6' - implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.7' + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.10.5' implementation group: 'com.amazonaws', name: 'aws-java-sdk-core', version: '1.11.452' testImplementation('org.junit.jupiter:junit-jupiter-api:5.3.1') diff --git a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java index 8c71bf8103..a4a49fd381 100644 --- a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java +++ b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/ElasticsearchType.java @@ -77,6 +77,7 @@ public enum ElasticsearchType { TIMESTAMP(JDBCType.TIMESTAMP, Timestamp.class, 24, 24, false), BINARY(JDBCType.VARBINARY, String.class, Integer.MAX_VALUE, 0, false), NULL(JDBCType.NULL, null, 0, 0, false), + UNDEFINED(JDBCType.NULL, null, 0, 0, false), UNSUPPORTED(JDBCType.OTHER, null, 0, 0, false); private static final Map jdbcTypeToESTypeMap; @@ -84,7 +85,7 @@ public enum ElasticsearchType { static { // Map JDBCType to corresponding ElasticsearchType jdbcTypeToESTypeMap = new HashMap<>(); - jdbcTypeToESTypeMap.put(JDBCType.NULL, NULL); + jdbcTypeToESTypeMap.put(JDBCType.NULL, UNDEFINED); jdbcTypeToESTypeMap.put(JDBCType.BOOLEAN, BOOLEAN); jdbcTypeToESTypeMap.put(JDBCType.TINYINT, BYTE); jdbcTypeToESTypeMap.put(JDBCType.SMALLINT, SHORT); diff --git a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypeConverters.java b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypeConverters.java index 1b11cdfb1e..6d15590378 100644 --- 
a/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypeConverters.java +++ b/sql-jdbc/src/main/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypeConverters.java @@ -18,6 +18,7 @@ import java.sql.Date; import java.sql.JDBCType; +import java.sql.SQLException; import java.sql.Time; import java.sql.Timestamp; import java.util.Arrays; @@ -63,6 +64,8 @@ public class TypeConverters { tcMap.put(JDBCType.BIGINT, new BigIntTypeConverter()); tcMap.put(JDBCType.BINARY, new BinaryTypeConverter()); + + tcMap.put(JDBCType.NULL, new NullTypeConverter()); } public static TypeConverter getInstance(JDBCType jdbcType) { @@ -302,4 +305,14 @@ public Class getDefaultJavaClass() { return Short.class; } } + + public static class NullTypeConverter implements TypeConverter { + + @Override + public T convert(Object value, Class clazz, Map conversionParams) { + // As Javadoc for ResultSet.getObject() API, a SQL NULL needs to be converted to + // a JAVA null here + return null; + } + } } diff --git a/sql-jdbc/src/test/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypesTests.java b/sql-jdbc/src/test/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypesTests.java index 055cf27f7f..c62186608a 100644 --- a/sql-jdbc/src/test/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypesTests.java +++ b/sql-jdbc/src/test/java/com/amazon/opendistroforelasticsearch/jdbc/types/TypesTests.java @@ -16,9 +16,18 @@ package com.amazon.opendistroforelasticsearch.jdbc.types; +import static java.util.Collections.emptyMap; +import static org.junit.jupiter.api.Assertions.assertNull; + +import java.sql.JDBCType; +import java.sql.SQLException; +import org.junit.jupiter.api.Test; + public class TypesTests { - - public void testIntegerTypeConverter() { - TypeConverters.IntegerTypeConverter tc = new TypeConverters.IntegerTypeConverter(); + + @Test + public void testNullTypeConverter() throws SQLException { + TypeConverter tc = TypeConverters.getInstance(JDBCType.NULL); + assertNull(tc.convert(null, Object.class, emptyMap())); } } diff --git a/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md index f7d60cc949..41dbf888ba 100644 --- a/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md +++ b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md @@ -2,85 +2,104 @@ ## Windows -### Dependencies +### Requirements -* [cmake](https://cmake.org/install/) -* [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) (Other versions may work, but only 2019 has been tested) -* [ODBC Driver source code](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc) +* Install [cmake](https://cmake.org/install/) +* Install [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) + * Other versions may work, but only 2019 has been tested -### Build +**NOTE:** All Windows/`.ps1` scripts must be run from a [Developer Powershell](https://devblogs.microsoft.com/visualstudio/the-powershell-you-know-and-love-now-with-a-side-of-visual-studio/). -#### with Visual Studio +> A shortcut is installed on your system with Visual Studio (**"Developer Powershell for VS 2019"**) -Run `./build_win_.ps1` to generate a VS2019 project for building/testing the driver. (the build may fail, but should still generate a `.sln` file) +> Programs launched with this prompt (ex: VS Code) also have access to the Developer shell. -The solution can be found at `\build\odbc\build\global_make_list.sln`. 
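Before the ODBC build notes continue, a caller's-eye view of the sql-jdbc change shown just above: `JDBCType.NULL` now maps to the new `UNDEFINED` type, and the registered `NullTypeConverter` always yields a Java `null`, so `SELECT NULL`-style results (as in the new NullLiteralIT) come back as plain nulls through `ResultSet.getObject()`. The sketch below shows that caller side under stated assumptions; the JDBC URL, host/port, and absence of authentication are assumptions, not something this diff specifies.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class NullColumnReadSketch {
  public static void main(String[] args) throws Exception {
    // Assumed connection string for a local, unsecured cluster; adjust as needed.
    String url = "jdbc:elasticsearch://localhost:9200";
    try (Connection conn = DriverManager.getConnection(url);
         Statement stmt = conn.createStatement();
         // NULL literals now surface as the UNDEFINED type and convert to Java null.
         ResultSet rs = stmt.executeQuery("SELECT NULL, ABS(NULL)")) {
      while (rs.next()) {
        Object first = rs.getObject(1);   // expected: null
        Object second = rs.getObject(2);  // expected: null
        System.out.println(first + ", " + second + ", wasNull=" + rs.wasNull());
      }
    }
  }
}
```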
+> For more information regarding the Developer Powershell: https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line?view=msvc-160 -#### with Developer Powershell +### Build -Use `./build_win_.ps1` to build the driver from a Developer Powershell prompt. +``` +.\build_win_.ps1 +``` -> A shortcut is installed on your system with Visual Studio (search for **"Developer Powershell for VS 2019"**) +A Visual Studio 2019 solution will also be generated for building/testing the driver with Visual Studio. (Note: if the build fails, the solution will still be generated.) -> Programs launched with this prompt (ex: VS Code) also have access to the Developer shell. +Solution file: `\build\odbc\cmake\global_make_list.sln`. -### Build Output +### Output ``` build -└-- - └-- odbc - └-- bin - └-- - └-- build - └-- lib - └-- aws-sdk - └-- build - └-- install +└-- odbc + └-- bin + └-- + └-- cmake + └-- lib +└-- aws-sdk + └-- build + └-- install ``` -* Driver DLL: `.\build\\odbc\bin\\odfesqlodbc.dll` -* Test binaries folder: `.\build\\odbc\bin\` +* Driver DLL: `.\build\odbc\bin\\odfesqlodbc.dll` +* Test binaries folder: `.\build\odbc\bin\\` ### Packaging -From a Developer Powershell, run: +**Note:** If you make changes to the driver code or CMake project files, re-run the `build_windows_.sh` script before running the following command. + ``` -msbuild .\build\Release\odbc\PACKAGE.vcxproj -p:Configuration=Release +msbuild .\build\odbc\PACKAGE.vcxproj -p:Configuration=Release ``` -An installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Windows--bit.msi` will be generated in the build directory. +`Open Distro for Elasticsearch SQL ODBC Driver--Windows--bit.msi` will be generated in the build directory. +### Testing +See [run_tests.md](./run_tests.md) ## Mac -(TODO: upgrade build scripts & documentation for Mac) -### Dependencies +### Requirements -Homebrew must be installed to manage packages, to install homebrew see the [homebrew homepage](https://brew.sh/). -Using homebrew, install the following packages using the command provided: ->brew install [package] -> ->* curl ->* cmake ->* libiodbc +[Homebrew](https://brew.sh/) is required for installing the following necessary packages. +``` +brew install curl +brew install cmake +brew install libiodbc +``` -### Building the Driver +### Build -From a Bash shell: +``` +./build_mac_.sh +``` -`./build_mac_.sh` +### Output -### Output Location on Mac +``` +build +└-- odbc + └-- bin + └-- lib +└-- cmake-build64 + └-- +``` -Compiling on Mac will output the tests to **bin64** and the driver to **lib64**. There are also some additional test infrastructure files which output to the **lib64** directory. +* Driver library: `./build/odbc/lib/libodfesqlodbc.dylib` +* Test binaries folder: `./build/odbc/bin/` ### Packaging -Run below command from the project's build directory. ->cpack . +**Note:** If you make changes to the driver code or CMake project files, re-run the `build_mac_.sh` script before running the following command. + +``` +cd cmake-build64/ +cpack . +``` + +`Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory. -Installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory. 
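Returning briefly to the DateTimeFunctionIT changes earlier in this diff: the new cases pin down how `microsecond()` treats fewer than six fractional digits, i.e. `'17:30:00.1234'` means 0.1234 seconds and therefore 123400 microseconds, not 1234. The same right-padded truncation can be reproduced with plain `java.time`; this is an illustration only, not the engine's own datetime value implementation.

```java
import java.time.LocalTime;

public class MicrosecondSketch {

  // Parse a time literal and report whole microseconds, mirroring the expected test rows.
  static int microsecond(String time) {
    return LocalTime.parse(time).getNano() / 1000;
  }

  public static void main(String[] args) {
    System.out.println(microsecond("17:30:00.123456")); // 123456
    System.out.println(microsecond("17:30:00.1234"));   // 123400
    System.out.println(microsecond("17:30:00.000010")); // 10
  }
}
```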
+### Testing +See [run_tests.md](./run_tests.md) ## General Build Info diff --git a/sql-odbc/docs/dev/run_tests.md b/sql-odbc/docs/dev/run_tests.md index 903e5f8f0a..8f7fb1b87a 100644 --- a/sql-odbc/docs/dev/run_tests.md +++ b/sql-odbc/docs/dev/run_tests.md @@ -1,14 +1,16 @@ # ODFE SQL ODBC Driver Testing -## Preparation +## Requirements * Latest version of [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) +* [Required datasets loaded](#set-up-test-datasets) +* [DSN configured](#set-up-dsn) -### Loading Test Datasets +### Set up test datasets -Loading a dataset requires an [elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these are missing, please refer to the documentation on how to set them up. +Loading a dataset requires an [OpenDistro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [Kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these are missing, please refer to the documentation on how to set them up. -Note, if you wish to work with SSL/TLS, you need to configure Elasticsearch and Kibana to support it. See Working With SSL/TLS below. +Note, if you wish to work with SSL/TLS, you need to configure ODFE and Kibana to support it. See the [build instructions](./BUILD_INSTRUCTIONS.md) for more info. First load the sample datasets provided by kibana. @@ -23,14 +25,9 @@ Select the wrench on the left control panel. Enter the following commands into t * [kibana_sample_data_types](./datasets/kibana_sample_data_types.md) -## Running Tests - -Tests can be **executed directly**, or by using the **Test Runner**. - -**NOTES:** +### Set up DSN -* A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions, below. -* Datasets must be loaded into Elasticsearch using [kibana](https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html). See the section on loading datasets below. +A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions below. ### Windows Test DSN Setup @@ -50,7 +47,12 @@ Tests can be **executed directly**, or by using the **Test Runner**. * `export ODBCINSTINI=/src/IntegrationTests/ITODBCConnection/test_odbcinst.ini` * Manually add the entries to your existing `odbc.ini` and `odbcinst.ini` entries. (normally found at `~/.odbc.ini` and `~/.odbcinst.ini`) -### Running Tests directly on Windows +## Running Tests +Tests can be executed directly, or by using the **Test Runner**. + +### Direct + +#### Windows Tests can be executed directly using **Visual Studio** by setting the desired test as a **Start up Project** @@ -60,23 +62,23 @@ Tests can be executed directly using **Visual Studio** by setting the desired te For more information, see the [Visual Studio Console Application documentation](https://docs.microsoft.com/en-us/cpp/build/vscpp-step-2-build?view=vs-2019). -### Running Tests directly on Mac +#### Mac Tests can be executed using a command line interface. From the project root directory, enter: > **bin64/** -### Running Tests using the Test Runner +### Test Runner The **Test Runner** requires [python](https://wiki.python.org/moin/BeginnersGuide/Download) to be installed on the system. 
Running the **Test Runner** will execute all the tests and compile a report with the results. The report indicates the execution status of all tests along with the execution time. To find error details of any failed test, hover over the test. -#### Running Tests using the Test Runner on Windows +#### Windows Open the project's root directory in a command line interface of your choice. Execute >**.\run_test_runner.bat** The **Test Runner** has been tried and tested with [Python3.8](https://www.python.org/downloads/release/python-380/) on **Windows systems**. Other versions of Python may work, but are untested. -#### Running Tests using the Test Runner on Mac +#### Mac Open the project's root directory in a command line interface of your choice. Execute >**./run_test_runner.sh** @@ -87,12 +89,15 @@ The **Test Runner** has been tried and tested with [Python3.7.6](https://www.pyt (using a CMake script provided by George Cave (StableCoder) under the Apache 2.0 license, found [here](https://github.com/StableCoder/cmake-scripts/blob/master/code-coverage.cmake)) -> **NOTE**: Before building with coverage, make sure the following directory is in your PATH environment variable: -> `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin` +#### Requirements +* `llvm-cov` in your PATH environment variable + * Possible locations for this binary: + * `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin` + * `/Library/Developer/CommandLineTools/usr/bin` To build the tests with code coverage enabled, set the `CODE_COVERAGE` variable to `ON` when preparing your CMake build. ```bash -cmake ... -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON +cmake -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON ``` To get coverage for the driver library, you must use the `ccov-all` target, which runs all test suites and components with coverage. 
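To make the coverage workflow described above easier to follow, here is a minimal end-to-end sketch for Mac. It assumes an out-of-source build directory named `cmake-build64`, that the top-level CMake lists live under `src/`, and that the default Makefile generator is used; only the `-DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON` flags and the `ccov-all` target come from the instructions above, everything else is a placeholder to adapt to your checkout.

```bash
# Sketch only: directory layout and generator are assumptions, not part of the docs above.
# llvm-cov must be on PATH, as noted in the requirements.
mkdir -p cmake-build64 && cd cmake-build64

# Configure with tests and coverage enabled (flags taken from the instructions above).
cmake ../src -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON

# Build and run every test suite with coverage via the ccov-all target.
make ccov-all
```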
diff --git a/sql-odbc/src/odfesqlodbc/es_communication.cpp b/sql-odbc/src/odfesqlodbc/es_communication.cpp index 8f895c6b09..e40f170c79 100644 --- a/sql-odbc/src/odfesqlodbc/es_communication.cpp +++ b/sql-odbc/src/odfesqlodbc/es_communication.cpp @@ -36,7 +36,7 @@ static const std::string SQL_ENDPOINT_FORMAT_JDBC = static const std::string SQL_ENDPOINT_CLOSE_CURSOR = "/_opendistro/_sql/close"; static const std::string PLUGIN_ENDPOINT_FORMAT_JSON = "/_cat/plugins?format=json"; -static const std::string OPENDISTRO_SQL_PLUGIN_NAME = "opendistro_sql"; +static const std::string OPENDISTRO_SQL_PLUGIN_NAME = "opendistro-sql"; static const std::string ALLOCATION_TAG = "AWS_SIGV4_AUTH"; static const std::string SERVICE_NAME = "es"; static const std::string ESODBC_PROFILE_NAME = "elasticsearchodbc"; diff --git a/sql/src/main/antlr/OpenDistroSQLLexer.g4 b/sql/src/main/antlr/OpenDistroSQLLexer.g4 index 8f094179d9..17eb7565d2 100644 --- a/sql/src/main/antlr/OpenDistroSQLLexer.g4 +++ b/sql/src/main/antlr/OpenDistroSQLLexer.g4 @@ -208,6 +208,7 @@ MODULUS: 'MODULUS'; MONTHNAME: 'MONTHNAME'; MULTIPLY: 'MULTIPLY'; NOW: 'NOW'; +NULLIF: 'NULLIF'; PI: 'PI'; POW: 'POW'; POWER: 'POWER'; diff --git a/sql/src/main/antlr/OpenDistroSQLParser.g4 b/sql/src/main/antlr/OpenDistroSQLParser.g4 index 949e96de24..c6d2b15085 100644 --- a/sql/src/main/antlr/OpenDistroSQLParser.g4 +++ b/sql/src/main/antlr/OpenDistroSQLParser.g4 @@ -295,6 +295,7 @@ scalarFunctionName : mathematicalFunctionName | dateTimeFunctionName | textFunctionName + | flowControlFunctionName ; specificFunction @@ -356,6 +357,10 @@ textFunctionName | CONCAT | CONCAT_WS | SUBSTR | LENGTH | STRCMP | RIGHT ; +flowControlFunctionName + : IF | IFNULL | NULLIF | ISNULL + ; + functionArgs : functionArg (COMMA functionArg)* ; diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java index 39ac8575fa..3a62461d41 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java @@ -21,8 +21,7 @@ import com.amazon.opendistroforelasticsearch.sql.ast.Node; import com.amazon.opendistroforelasticsearch.sql.ast.expression.AggregateFunction; import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias; -import com.amazon.opendistroforelasticsearch.sql.ast.expression.Function; -import com.amazon.opendistroforelasticsearch.sql.ast.expression.Literal; +import com.amazon.opendistroforelasticsearch.sql.ast.expression.QualifiedName; import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression; import com.amazon.opendistroforelasticsearch.sql.ast.tree.Aggregation; import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan; @@ -125,8 +124,7 @@ private List replaceGroupByItemIfAliasOrOrdinal() { */ private Optional findNonAggregatedItemInSelect() { return querySpec.getSelectItems().stream() - .filter(this::isNonAggregatedExpression) - .filter(this::isNonLiteralFunction) + .filter(this::isNonAggregateOrLiteralExpression) .findFirst(); } @@ -134,27 +132,18 @@ private boolean isAggregatorNotFoundAnywhere() { return querySpec.getAggregators().isEmpty(); } - private boolean isNonLiteralFunction(UnresolvedExpression expr) { - // The base case for recursion - if (expr instanceof Literal) { + private boolean 
isNonAggregateOrLiteralExpression(UnresolvedExpression expr) { + if (expr instanceof AggregateFunction) { return false; } - if (expr instanceof Function) { - List children = expr.getChild(); - return children.stream().anyMatch(child -> - isNonLiteralFunction((UnresolvedExpression) child)); - } - return true; - } - private boolean isNonAggregatedExpression(UnresolvedExpression expr) { - if (expr instanceof AggregateFunction) { - return false; + if (expr instanceof QualifiedName) { + return true; } List children = expr.getChild(); - return children.stream() - .allMatch(child -> isNonAggregatedExpression((UnresolvedExpression) child)); + return children.stream().anyMatch(child -> + isNonAggregateOrLiteralExpression((UnresolvedExpression) child)); } } diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstSortBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstSortBuilder.java index 0af37f6405..1dfd615eaf 100644 --- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstSortBuilder.java +++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstSortBuilder.java @@ -17,6 +17,7 @@ package com.amazon.opendistroforelasticsearch.sql.sql.parser; import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.booleanLiteral; +import static com.amazon.opendistroforelasticsearch.sql.ast.dsl.AstDSL.qualifiedName; import static com.amazon.opendistroforelasticsearch.sql.ast.expression.DataType.INTEGER; import static com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.NullOrder.NULL_FIRST; import static com.amazon.opendistroforelasticsearch.sql.ast.tree.Sort.SortOrder.DESC; diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java index 87183e4051..e4e831e5b3 100644 --- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java +++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java @@ -133,6 +133,27 @@ void can_build_implicit_group_by_for_aggregator_in_having_clause() { hasGroupByItems(), hasAggregators( alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + + assertThat( + buildAggregation("SELECT INTERVAL 1 DAY FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators( + alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + + assertThat( + buildAggregation("SELECT CAST(1 AS LONG) FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators( + alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); + + assertThat( + buildAggregation("SELECT CASE WHEN true THEN 1 ELSE 2 END FROM test HAVING AVG(age) > 30"), + allOf( + hasGroupByItems(), + hasAggregators( + alias("AVG(age)", aggregate("AVG", qualifiedName("age")))))); } @Test diff --git a/workbench/public/components/Main/main.tsx b/workbench/public/components/Main/main.tsx index c6c1074d83..72713c09de 100644 --- a/workbench/public/components/Main/main.tsx +++ b/workbench/public/components/Main/main.tsx @@ -262,10 +262,20 @@ export class Main extends React.Component { }; } if (!response.data.ok) { + let err = response.data.resp; + console.log("Error occurred when processing query response: ", err) + + // Mark fulfilled to true as long as the data is fulfilled + if (response.data.body) { + return { + fulfilled: true, + 
errorMessage: err, + data: response.data.body + } + } return { fulfilled: false, - errorMessage: response.data.resp, - data: response.data.body, + errorMessage: err, }; }
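To make the intent of the workbench change above concrete, here is a minimal TypeScript sketch of how a caller might consume the `{ fulfilled, errorMessage, data }` result it now returns. The interface and callback names are assumptions for illustration, not the plugin's actual API; the point is that a partially failed response still carries a body, so the data is rendered and the error message is surfaced alongside it rather than being dropped.

```typescript
// Sketch only: `QueryResult`, `render`, and `notify` are hypothetical names,
// mirroring the { fulfilled, errorMessage, data } shape returned above.
interface QueryResult<T> {
  fulfilled: boolean;
  errorMessage?: string;
  data?: T;
}

function handleQueryResult<T>(
  result: QueryResult<T>,
  render: (data: T) => void,
  notify: (message: string) => void
): void {
  // A response marked fulfilled may still carry an error: show whatever data
  // came back first, then surface the error so the user knows it was partial.
  if (result.data !== undefined) {
    render(result.data);
  }
  if (result.errorMessage) {
    notify(result.errorMessage);
  }
}
```

Treating a response with a body as fulfilled keeps partially successful queries useful: the rows that did come back are still shown, and the error message explains why the result is incomplete.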