Commit
modify test case
windpiger committed Mar 3, 2017
1 parent 35d2b59 commit e3a467e
Showing 1 changed file with 17 additions and 11 deletions.
@@ -939,15 +939,16 @@ object SPARK_19667_CREATE_TABLE {
def main(args: Array[String]): Unit = {
val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
try {
val warehousePath = s"file:${spark.sharedState.warehousePath.stripSuffix("/")}"
val warehousePath = new Path(spark.sharedState.warehousePath)
val fs = warehousePath.getFileSystem(spark.sessionState.newHadoopConf())
val defaultDB = spark.sessionState.catalog.getDatabaseMetadata("default")
// the default database uses the warehouse path as its location
- assert(defaultDB.locationUri.stripSuffix("/") == warehousePath)
+ assert(new Path(defaultDB.locationUri) == fs.makeQualified(warehousePath))
spark.sql("CREATE TABLE t(a string)")

val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
// a table in the default database uses the default database's location, which is also the warehouse path
- assert(table.location.stripSuffix("/") == s"$warehousePath/t")
+ assert(new Path(table.location) == fs.makeQualified(new Path(warehousePath, "t")))
spark.sql("INSERT INTO TABLE t SELECT 1")
assert(spark.sql("SELECT * FROM t").count == 1)

@@ -956,7 +957,8 @@ object SPARK_19667_CREATE_TABLE {
spark.sql("CREATE TABLE t1(b string)")
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
// a table in a non-default database uses the location of its own database
- assert(table1.location.stripSuffix("/") == s"$warehousePath/not_default.db/t1")
+ assert(new Path(table1.location) == fs.makeQualified(
+   new Path(warehousePath, "not_default.db/t1")))
} finally {
spark.sql("USE default")
}
@@ -967,45 +969,49 @@ object SPARK_19667_VERIFY_TABLE_PATH {
def main(args: Array[String]): Unit = {
val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
try {
val warehousePath = s"file:${spark.sharedState.warehousePath.stripSuffix("/")}"
val warehousePath = new Path(spark.sharedState.warehousePath)
val fs = warehousePath.getFileSystem(spark.sessionState.newHadoopConf())
val defaultDB = spark.sessionState.catalog.getDatabaseMetadata("default")
// the default database uses the warehouse path as its location
- assert(defaultDB.locationUri.stripSuffix("/") == warehousePath)
+ assert(new Path(defaultDB.locationUri) == fs.makeQualified(warehousePath))

val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
// the table in the default database was created by the SPARK_19667_CREATE_TABLE job above,
// which used a different warehouse path from this job; its location still equals
// the location it was created with.
- assert(table.location.stripSuffix("/") != s"$warehousePath/t")
+ assert(new Path(table.location) != fs.makeQualified(new Path(warehousePath, "t")))
assert(spark.sql("SELECT * FROM t").count == 1)

spark.sql("CREATE TABLE t3(d string)")
val table3 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t3"))
// a table created in the default database by this job uses this job's warehouse path
// as its location
- assert(table3.location.stripSuffix("/") == s"$warehousePath/t3")
+ assert(new Path(table3.location) == fs.makeQualified(new Path(warehousePath, "t3")))

spark.sql("USE not_default")
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t1"))
// the table in the non-default database was created by the SPARK_19667_CREATE_TABLE job above,
// which used a different warehouse path from this job; its location still equals
// the location it was created with.
- assert(table1.location.stripSuffix("/") != s"$warehousePath/not_default.db/t1")
+ assert(new Path(table1.location) != fs.makeQualified(
+   new Path(warehousePath, "not_default.db/t1")))
assert(!new File(s"$warehousePath/not_default.db/t1").exists())

spark.sql("CREATE TABLE t2(c string)")
val table2 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t2"))
// a table created in the non-default database by this job uses the location
// of that database, not this job's warehouse path
- assert(table2.location.stripSuffix("/") != s"$warehousePath/not_default.db/t2")
+ assert(new Path(table2.location) != fs.makeQualified(
+   new Path(warehousePath, "not_default.db/t2")))

spark.sql("CREATE DATABASE not_default_1")
spark.sql("USE not_default_1")
spark.sql("CREATE TABLE t4(e string)")
val table4 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t4"))
// a table created in a database that was itself created by this job uses the location
// of that database.
- assert(table4.location.stripSuffix("/") == s"$warehousePath/not_default_1.db/t4")
+ assert(new Path(table4.location) == fs.makeQualified(
+   new Path(warehousePath, "not_default_1.db/t4")))

} finally {
spark.sql("DROP TABLE IF EXISTS t4")
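For context, here is a minimal standalone sketch of the comparison pattern the new assertions adopt: qualify both sides with the Hadoop FileSystem via makeQualified and compare Path objects, rather than comparing raw location strings with stripSuffix. The paths, object name, and Configuration below are illustrative assumptions, not taken from the test.

// Illustrative sketch only: shows why fs.makeQualified(new Path(...)) comparisons are
// more robust than string comparisons for table and database locations.
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object QualifiedPathComparisonSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    val warehousePath = new Path("/tmp/spark-warehouse")   // hypothetical warehouse dir
    val fs: FileSystem = warehousePath.getFileSystem(conf)

    // A catalog location may or may not carry a scheme or a trailing slash.
    val tableLocation = "/tmp/spark-warehouse/t/"

    // String comparison is sensitive to the "file:" prefix and trailing slashes,
    // so equivalent locations can compare as unequal.
    val stringEqual = tableLocation.stripSuffix("/") == s"file:$warehousePath/t"

    // Qualifying both sides normalizes scheme, authority, and trailing slash,
    // so equivalent locations compare as equal.
    val pathEqual =
      fs.makeQualified(new Path(tableLocation)) == fs.makeQualified(new Path(warehousePath, "t"))

    println(s"string comparison: $stringEqual, qualified Path comparison: $pathEqual")
    // Expected: string comparison: false, qualified Path comparison: true
  }
}

In the test itself, warehousePath comes from spark.sharedState.warehousePath and fs from spark.sessionState.newHadoopConf(), as shown in the diff above.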
