Skip to content
Permalink
Browse files
[CARBONDATA-4328] Load parquet table with options error message fix
Why is this PR needed?
If a parquet table is created and a load statement with options is
triggered, then it fails with NoSuchTableException:
Table ${tableIdentifier.table} does not exist.

What changes were proposed in this PR?
As loading into a parquet table is not handled, added a check to filter out
non-carbon tables in the parser, so that the Spark parser can handle the statement.

This closes #4253
  • Loading branch information
ShreelekhyaG authored and Indhumathi27 committed Mar 29, 2022
1 parent 41831ce commit d6ce9467488d342134f8072326667aef7031e8a2
Showing 3 changed files with 24 additions and 3 deletions.
@@ -19,8 +19,10 @@ package org.apache.spark.sql.parser

import scala.language.implicitConversions

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.command.management.RefreshCarbonTableCommand
import org.apache.spark.sql.execution.strategy.CarbonPlanHelper

/**
* Parser for All Carbon DDL, DML cases in Unified context
@@ -51,7 +53,8 @@ class CarbonExtensionSpark2SqlParser extends CarbonSpark2SqlParser {
(INTO ~> TABLE ~> (ident <~ ".").? ~ ident) ~
(PARTITION ~> "(" ~> repsep(partitions, ",") <~ ")").? ~
(OPTIONS ~> "(" ~> repsep(options, ",") <~ ")") <~ opt(";") ^^ {
case filePath ~ isOverwrite ~ table ~ partitions ~ optionsList =>
case filePath ~ isOverwrite ~ table ~ partitions ~ optionsList
if CarbonPlanHelper.isCarbonTable(TableIdentifier(table._2, table._1)) =>
val (databaseNameOp, tableName) = table match {
case databaseName ~ tableName => (databaseName, tableName.toLowerCase())
}
@@ -495,7 +495,8 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
(INTO ~> TABLE ~> (ident <~ ".").? ~ ident) ~
(PARTITION ~> "(" ~> repsep(partitions, ",") <~ ")").? ~
(OPTIONS ~> "(" ~> repsep(options, ",") <~ ")").? <~ opt(";") ^^ {
case filePath ~ isOverwrite ~ table ~ partitions ~ optionsList =>
case filePath ~ isOverwrite ~ table ~ partitions ~ optionsList
if CarbonPlanHelper.isCarbonTable(TableIdentifier(table._2, table._1)) =>
val (databaseNameOp, tableName) = table match {
case databaseName ~ tableName => (databaseName, tableName.toLowerCase())
}
@@ -21,7 +21,7 @@ import java.io.File

import org.apache.commons.codec.binary.{Base64, Hex}
import org.apache.commons.io.FileUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.test.util.QueryTest
import org.apache.spark.util.SparkUtil
import org.scalatest.BeforeAndAfterAll
@@ -657,6 +657,23 @@ class SparkCarbonDataSourceBinaryTest extends QueryTest with BeforeAndAfterAll {
|| exception.getMessage.contains("DELETE TABLE is not supported temporarily."))
}

test("test load on parquet table") {
  // Verify that LOAD DATA on a non-carbon (parquet) table is rejected by Spark
  // itself instead of failing inside the Carbon parser with NoSuchTableException.
  sql("drop table if exists parquet_table")
  sql("create table parquet_table(empno int, empname string, projdate Date) using parquet")
  // Without OPTIONS: the statement reaches Spark's LoadDataCommand, which
  // rejects LOAD DATA for datasource tables.
  val loadWithoutOptions = intercept[AnalysisException] {
    sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE parquet_table
    """.stripMargin)
  }
  assert(loadWithoutOptions.getMessage
    .contains("LOAD DATA is not supported for datasource tables: `default`.`parquet_table`"))
  // With OPTIONS: the Carbon parser no longer claims the statement (non-carbon
  // table), so Spark's own parser reports a plain syntax error.
  val loadWithOptions = intercept[AnalysisException] {
    sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE parquet_table
      |OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""".stripMargin)
  }
  assert(loadWithOptions.getMessage.contains("mismatched input"))
  sql("drop table if exists parquet_table")
}

test("test array of binary data type with sparkfileformat ") {
sql("drop table if exists carbon_table")
sql("drop table if exists parquet_table")

0 comments on commit d6ce946

Please sign in to comment.