diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
index d68195cba1c..eabe0f58d5c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala
@@ -517,7 +517,7 @@ class TestTimeSeriesCreateTable extends QueryTest with BeforeAndAfterAll with Be
           |GROUP BY dataTime
         """.stripMargin)
     }
-    assert(e.getMessage.contains("Table or view not found: maintableno"))
+    assert(e.getMessage.contains("Table or view not found: mainTableNo"))
   }
 
   test("test timeseries create table 33: support event_time and granularity key with space") {
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
index 0314dd88899..db52361cd9e 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/preaaggregate/PreAggregateUtil.scala
@@ -196,7 +196,7 @@ object PreAggregateUtil {
       parentDatabaseName: String,
       carbonTable: CarbonTable) : ColumnTableRelation = {
     val parentColumnId = carbonTable.getColumnByName(parentTableName, parentColumnName).getColumnId
-    val columnTableRelation = ColumnTableRelation(parentColumnName = parentColumnName,
+    val columnTableRelation = ColumnTableRelation(parentColumnName = parentColumnName.toLowerCase(),
       parentColumnId = parentColumnId, parentTableName = parentTableName, parentDatabaseName =
       parentDatabaseName, parentTableId = parentTableId)
     columnTableRelation
@@ -386,13 +386,14 @@ object PreAggregateUtil {
       aggregateType: String = "",
       parentTableName: String,
       columnTableRelationList: Seq[ColumnTableRelation]): (Field, DataMapField) = {
-    val actualColumnName = if (aggregateType.equals("")) {
+    var actualColumnName = if (aggregateType.equals("")) {
       parentTableName + '_' + columnName
     } else {
       parentTableName + '_' + columnName + '_' + aggregateType
     }
     val rawSchema = '`' + actualColumnName + '`' + ' ' + dataType.typeName
     val dataMapField = DataMapField(aggregateType, Some(columnTableRelationList))
+    actualColumnName = actualColumnName.toLowerCase()
     if (dataType.typeName.startsWith("decimal")) {
       val (precision, scale) = CommonUtil.getScaleAndPrecision(dataType.catalogString)
       (Field(column = actualColumnName,
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index 1c3b7cf2916..d2fdb08ea3d 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -691,7 +691,7 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
   }
 
   def addPreAggFunction(sql: String): String = {
-    addPreAgg(new lexical.Scanner(sql.toLowerCase)) match {
+    addPreAgg(new lexical.Scanner(sql)) match {
       case Success(query, _) => query
       case _ =>
         throw new MalformedCarbonCommandException(s"Unsupported query")
@@ -699,7 +699,7 @@
   }
 
   def addPreAggLoadFunction(sql: String): String = {
-    addPreAggLoad(new lexical.Scanner(sql.toLowerCase)) match {
+    addPreAggLoad(new lexical.Scanner(sql)) match {
       case Success(query, _) => query
       case _ =>
         throw new MalformedCarbonCommandException(s"Unsupported query")