Skip to content

Commit

Permalink
Fixing tests
Browse files (browse the repository at this point in the history)
  • Loading branch information
Alexey Kudinkin committed Dec 7, 2022
1 parent 6d9c8ae commit 6f10d56
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ class HoodieSparkSqlTestBase extends FunSuite with BeforeAndAfterAll {
spark.sql(sql)
} catch {
case e: Throwable =>
assertResult(errorMsg)(e.getMessage)
assertResult(errorMsg.trim)(e.getMessage.trim)
hasException = true
}
assertResult(true)(hasException)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -530,16 +530,18 @@ class TestInsertTable extends HoodieSparkSqlTestBase {
| partitioned by (dt)
""".stripMargin)
checkException(s"insert into $tableName partition(dt = '2021-06-20') select 1, 'a1', 10, '2021-06-20'") (
"Expected table's schema: " +
"[StructField(id,IntegerType,true), StructField(name,StringType,true), StructField(price,DoubleType,true), StructField(dt,StringType,true)], " +
"query's output (including static partition values): " +
"[StructField(1,IntegerType,false), StructField(a1,StringType,false), StructField(10,IntegerType,false), StructField(2021-06-20,StringType,false), StructField(dt,StringType,true)]"
"""
|Cannot write to 'default.h0', too many data columns:
|Table columns: 'id', 'name', 'price'
|Data columns: '1', 'a1', '10', '2021-06-20'
|""".stripMargin
)
checkException(s"insert into $tableName select 1, 'a1', 10")(
"Expected table's schema: " +
"[StructField(id,IntegerType,true), StructField(name,StringType,true), StructField(price,DoubleType,true), StructField(dt,StringType,true)], " +
"query's output (including static partition values): " +
"[StructField(1,IntegerType,false), StructField(a1,StringType,false), StructField(10,IntegerType,false)]"
"""
|Cannot write to 'default.h0', not enough data columns:
|Table columns: 'id', 'name', 'price', 'dt'
|Data columns: '1', 'a1', '10'
|""".stripMargin
)
spark.sql("set hoodie.sql.bulk.insert.enable = true")
spark.sql("set hoodie.sql.insert.mode = strict")
Expand Down

0 comments on commit 6f10d56

Please sign in to comment.