Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@
import org.apache.flink.table.operations.ddl.DropTableOperation;
import org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation;
import org.apache.flink.table.operations.ddl.DropViewOperation;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat;
import org.apache.flink.table.planner.delegation.hive.desc.DropPartitionDesc;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserAlterDatabaseDesc;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserAlterTableDesc;
Expand All @@ -91,8 +93,6 @@
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserDropFunctionDesc;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserDropTableDesc;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserShowTablesDesc;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserBaseSemanticAnalyzer;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserStorageFormat;
import org.apache.flink.table.planner.utils.OperationConverterUtils;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,14 @@
import org.apache.flink.table.planner.calcite.SqlExprToRexConverter;
import org.apache.flink.table.planner.delegation.ParserImpl;
import org.apache.flink.table.planner.delegation.PlannerContext;
import org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseException;
import org.apache.flink.table.planner.delegation.hive.copy.HiveASTParseUtils;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserContext;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserQueryState;
import org.apache.flink.table.planner.delegation.hive.desc.CreateTableASDesc;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserCreateViewDesc;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParseException;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParseUtils;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserASTNode;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer;
import org.apache.flink.table.planner.parse.CalciteParser;
import org.apache.flink.table.planner.plan.FlinkCalciteCatalogReader;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

package org.apache.flink.table.planner.delegation.hive;

import org.apache.flink.table.planner.delegation.hive.parse.HiveParserASTNode;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode;

/** Util class for the hive parser. */
public class HiveParserUtils {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,11 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.HiveParserContext;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTHintParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTLexer;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.antlr.runtime.BaseRecognizer;
import org.antlr.runtime.RecognitionException;
Expand All @@ -28,13 +28,13 @@ public class HiveASTParseError {
private final RecognitionException re;
private final String[] tokenNames;

HiveASTParseError(BaseRecognizer br, RecognitionException re, String[] tokenNames) {
public HiveASTParseError(BaseRecognizer br, RecognitionException re, String[] tokenNames) {
this.br = br;
this.re = re;
this.tokenNames = tokenNames;
}

String getMessage() {
public String getMessage() {
return br.getErrorHeader(re) + " " + br.getErrorMessage(re, tokenNames);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import java.util.ArrayList;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.HiveParserASTBuilder;
import org.apache.flink.table.planner.delegation.hive.HiveParserContext;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.util.Preconditions;

import org.antlr.runtime.tree.CommonTree;
Expand Down Expand Up @@ -189,9 +188,8 @@ private static void processSetColsNode(HiveParserASTNode setCols, ASTSearcher se
return;
case HiveASTParser.TOK_ALLCOLREF:
// We should find an alias of this insert and do (alias).*. This however won't
// fix e.g.
// positional order by alias case, cause we'd still have a star on the top
// level. Bail.
// fix e.g. positional order by alias case, cause we'd still have a star on the
// top level. Bail.
LOG.debug("Replacing SETCOLREF with ALLCOLREF because of nested ALLCOLREF");
setCols.token.setType(HiveASTParser.TOK_ALLCOLREF);
return;
Expand Down Expand Up @@ -222,8 +220,7 @@ private static void processSetColsNode(HiveParserASTNode setCols, ASTSearcher se
default:
// Not really sure how to refer to this (or if we can).
// TODO: We could find a different from branch for the union, that might have an
// alias?
// Or we could add an alias here to refer to, but that might break other
// alias? Or we could add an alias here to refer to, but that might break other
// branches.
LOG.debug(
"Replacing SETCOLREF with ALLCOLREF because of the nested node "
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParseDriver;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserASTNode;

/** Counterpart of hive's org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTBuilder. */
public class HiveParserASTBuilder {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;

import org.antlr.runtime.Token;
import org.antlr.runtime.tree.CommonTree;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

/** Counterpart of hive's org.apache.hadoop.hive.ql.parse.ASTNodeOrigin. */
public class HiveParserASTNodeOrigin {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserASTNode;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserBaseSemanticAnalyzer;

import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,17 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.parse;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.table.planner.delegation.hive.HiveParserConstants;
import org.apache.flink.table.planner.delegation.hive.HiveParserUtils;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionExpression;
import org.apache.flink.table.planner.delegation.hive.copy.HiveParserPTFInvocationSpec.PartitionSpec;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserCreateTableDesc.NotNullConstraint;
import org.apache.flink.table.planner.delegation.hive.desc.HiveParserCreateTableDesc.PrimaryKey;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserPTFInvocationSpec.PartitionExpression;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserPTFInvocationSpec.PartitionSpec;
import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserDDLSemanticAnalyzer;

import org.antlr.runtime.tree.Tree;
import org.apache.calcite.util.ImmutableBitSet;
Expand Down Expand Up @@ -1091,7 +1093,7 @@ public String getNullFormat() {
return nullFormat;
}

protected void analyzeRowFormat(HiveParserASTNode child) throws SemanticException {
public void analyzeRowFormat(HiveParserASTNode child) throws SemanticException {
child = (HiveParserASTNode) child.getChild(0);
int numChildRowFormat = child.getChildCount();
for (int numC = 0; numC < numChildRowFormat; numC++) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSpecialOperator;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandTypeInference;
import org.apache.calcite.sql.type.SqlTypeName;

/** Counterpart of hive's org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveBetween. */
public class HiveParserBetween extends SqlSpecialOperator {

    /**
     * Operand type-inference strategy where an unknown operand type is derived from the first
     * operand with a known type, but the first operand is forced to BOOLEAN.
     *
     * <p>NOTE: this field is deliberately declared BEFORE {@link #INSTANCE}. Static initializers
     * run in textual order (JLS 12.4.2); the private constructor invoked by {@code INSTANCE}'s
     * initializer reads this field, so declaring it after {@code INSTANCE} would silently pass
     * {@code null} as the operand type inference of the singleton.
     */
    public static final SqlOperandTypeInference FIRST_BOOLEAN_THEN_FIRST_KNOWN =
            (callBinding, returnType, operandTypes) -> {
                // The validator's sentinel "unknown" type.
                final RelDataType unknownType = callBinding.getValidator().getUnknownType();
                RelDataType knownType = unknownType;
                // Scan operands 1..n-1 and keep the first type that is already known.
                for (int i = 1; i < callBinding.getCall().getOperandList().size(); i++) {
                    SqlNode operand = callBinding.getCall().getOperandList().get(i);
                    knownType =
                            callBinding.getValidator().deriveType(callBinding.getScope(), operand);
                    if (!knownType.equals(unknownType)) {
                        break;
                    }
                }

                RelDataTypeFactory typeFactory = callBinding.getTypeFactory();
                // Operand 0 is always typed BOOLEAN; presumably it carries a flag for the
                // BETWEEN rewrite — TODO confirm against the callers that build this operator.
                operandTypes[0] = typeFactory.createSqlType(SqlTypeName.BOOLEAN);
                // Every remaining (unknown) operand inherits the first known type, if any.
                for (int i = 1; i < operandTypes.length; ++i) {
                    operandTypes[i] = knownType;
                }
            };

    /** Singleton instance of the Hive BETWEEN operator. */
    public static final SqlSpecialOperator INSTANCE = new HiveParserBetween();

    private HiveParserBetween() {
        super(
                "BETWEEN",
                SqlKind.BETWEEN,
                30, // precedence
                true, // left-associative
                ReturnTypes.BOOLEAN_NULLABLE,
                FIRST_BOOLEAN_THEN_FIRST_KNOWN,
                null); // no operand type checker
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,9 @@
* limitations under the License.
*/

package org.apache.flink.table.planner.delegation.hive;
package org.apache.flink.table.planner.delegation.hive.copy;

import org.apache.flink.table.planner.delegation.hive.parse.HiveASTParser;
import org.apache.flink.table.planner.delegation.hive.parse.HiveParserASTNode;

import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -88,6 +87,11 @@ public HiveParserContext(Configuration conf) {
viewsTokenRewriteStreams = new HashMap<>();
}

// Find whether we should execute the current query due to explain.
public boolean isExplainSkipExecution() {
return false;
}

public Path getMRTmpPath(URI uri) {
return null;
}
Expand Down
Loading