
Commit 78e4a2f: Improve code coverage.

morazow committed Feb 17, 2021 (1 parent: d38cd1c)

Showing 12 changed files with 82 additions and 31 deletions.
build.sbt (4 changes: 2 additions & 2 deletions)

@@ -9,8 +9,8 @@ lazy val orgSettings = Seq(
 )
 
 lazy val buildSettings = Seq(
-  scalaVersion := "2.12.13",
-  crossScalaVersions := Seq("2.11.12", "2.12.13")
+  scalaVersion := "2.12.12",
+  crossScalaVersions := Seq("2.11.12", "2.12.12")
 )
 
 lazy val root =
doc/changes/changes_1.0.0.md (29 changes: 15 additions & 14 deletions)

@@ -4,7 +4,7 @@
 
 ## Refactoring
 
-* #40: Added Exasol testcontainers, refactoring test environment (PR #87).
+* #40: Added Exasol testcontainers, refactored test environment (PR #87).
 
 ## Documentation
 
@@ -14,23 +14,24 @@
 
 ### Runtime Dependency Updates
 
-* Updated to `com.exasol:exasol-jdbc:7.0.0` to `7.0.7`
-* Updated to `org.apache.spark:spark-core:2.4.5` to `3.0.1`
-* Updated to `org.apache.spark:spark-sql:2.4.5` to `3.0.1`
+* Updated `com.exasol:exasol-jdbc:7.0.0` to `7.0.7`
+* Updated `org.apache.spark:spark-core:2.4.5` to `3.0.1`
+* Updated `org.apache.spark:spark-sql:2.4.5` to `3.0.1`
 
 ### Test Dependency Updates
 
-* Updated to `org.scalatest:scalatest:3.2.2` to `3.2.3`
-* Updated to `org.mockito:mockito-core:3.5.13` to `3.7.7`
-* Updated to `org.testcontainers:jdbc:1.14.3` to `1.15.2`
-* Updated to `com.dimafeng:testcontainers-scala:0.38.4` to `0.39.1`
-* Updated to `com.holdenkarau:spark-testing-base:2.4.5_0.14.0` to `3.0.1_1.0.0`
 * Added `com.exasol:exasol-testcontainers:3.5.0`
+* Removed `org.testcontainers:jdbc`
+* Removed `com.dimafeng:testcontainers-scala`
+* Updated `org.scalatest:scalatest:3.2.2` to `3.2.3`
+* Updated `org.mockito:mockito-core:3.5.13` to `3.7.7`
+* Updated `com.holdenkarau:spark-testing-base:2.4.5_0.14.0` to `3.0.1_1.0.0`
 
 ### Plugin Updates
 
-* Updated to `sbt.version:1.3.13` to `1.7.7`
-* Updated to `org.wartremover:sbt-wartremover:2.4.10` to `2.4.13`
-* Updated to `org.wartremover:sbt-wartremover-contrib:1.3.8` to `1.3.11`
-* Updated to `com.jsuereth:sbt-pgp:2.0.1` to `2.1.1`
-* Updated to `org.xerial.sbt:sbt-sonatype:3.9.4` to `3.9.5`
+* Updated `sbt.version:1.3.13` to `1.4.7`
+* Updated `org.wartremover:sbt-wartremover:2.4.10` to `2.4.13`
+* Updated `org.wartremover:sbt-wartremover-contrib:1.3.8` to `1.3.11`
+* Updated `com.jsuereth:sbt-pgp:2.0.1` to `2.1.1`
+* Updated `org.xerial.sbt:sbt-sonatype:3.9.4` to `3.9.5`
+* Removed `io.get-coursier:sbt-coursier`
project/Dependencies.scala (4 changes: 0 additions & 4 deletions)

@@ -12,8 +12,6 @@ object Dependencies {
   private val ScalaTestVersion = "3.2.3"
   private val ScalaTestMockitoVersion = "1.0.0-M2"
   private val MockitoVersion = "3.7.7"
-  private val ContainersJdbcVersion = "1.15.2"
-  private val ContainersScalaVersion = "0.39.1"
   private val ExasolTestContainersVersion = "3.5.0"
 
   private val sparkCurrentVersion =
@@ -37,8 +35,6 @@
     "org.scalatest" %% "scalatest" % ScalaTestVersion,
     "org.scalatestplus" %% "scalatestplus-mockito" % ScalaTestMockitoVersion,
     "org.mockito" % "mockito-core" % MockitoVersion,
-    "org.testcontainers" % "jdbc" % ContainersJdbcVersion,
-    "com.dimafeng" %% "testcontainers-scala" % ContainersScalaVersion,
     "com.holdenkarau" %% "spark-testing-base" % SparkTestingBaseVersion,
     "com.exasol" % "exasol-testcontainers" % ExasolTestContainersVersion,
   ).map(_ % Test)
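With the generic Testcontainers JDBC module and its Scala wrapper removed, the integration tests depend solely on Exasol's dedicated `exasol-testcontainers` module. A minimal start-up sketch, assuming the `ExasolContainer` API of version 3.5.0; the `ExasolTestContainer` subclass name is illustrative, not this repository's code:

```scala
import com.exasol.containers.ExasolContainer

// ExasolContainer uses a self-referential Java generic, so Scala code
// typically pins the SELF type with a small concrete subclass.
class ExasolTestContainer extends ExasolContainer[ExasolTestContainer]

val container = new ExasolTestContainer()
container.start()
// createConnection is inherited from JdbcDatabaseContainer; the empty
// string adds no extra JDBC URL query parameters.
val connection = container.createConnection("")
```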
scripts/ci.sh (2 changes: 1 addition & 1 deletion)

@@ -9,7 +9,7 @@ set -o errtrace -o nounset -o pipefail -o errexit
 BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )"
 cd "$BASE_DIR"
 
-MAIN_SCALA_VERSION=2.12.13
+MAIN_SCALA_VERSION=2.12.12
 MAIN_SPARK_VERSION=3.0.1
 
 if [[ -z "${TRAVIS_SCALA_VERSION:-}" ]]; then
src/it/scala/com/exasol/spark/ColumnPruningIT.scala (2 changes: 1 addition & 1 deletion)

@@ -5,7 +5,7 @@ import com.holdenkarau.spark.testing.DataFrameSuiteBase
 /**
  * Tests that only the required columns are selected from queries.
  */
-class ColumnPruningSuite extends BaseIntegrationTest with DataFrameSuiteBase {
+class ColumnPruningIT extends BaseIntegrationTest with DataFrameSuiteBase {
 
   test("returns only required columns in query") {
     createDummyTable()
src/it/scala/com/exasol/spark/LoadIT.scala (2 changes: 1 addition & 1 deletion)

@@ -10,7 +10,7 @@ import com.holdenkarau.spark.testing.DataFrameSuiteBase
  * Tests for loading data from Exasol queries as dataframes using short
  * and long source formats.
  */
-class LoadSuite extends BaseIntegrationTest with DataFrameSuiteBase {
+class LoadIT extends BaseIntegrationTest with DataFrameSuiteBase {
 
   test("runs dataframe show action successfully") {
     createDummyTable()
src/it/scala/com/exasol/spark/PredicatePushdownIT.scala (2 changes: 1 addition & 1 deletion)

@@ -9,7 +9,7 @@ import com.holdenkarau.spark.testing.DataFrameSuiteBase
 /**
  * Tests where clause generation for user queries.
  */
-class PredicatePushdownSuite extends BaseIntegrationTest with DataFrameSuiteBase {
+class PredicatePushdownIT extends BaseIntegrationTest with DataFrameSuiteBase {
 
   test("with where clause build from filters: filter") {
     createDummyTable()
src/it/scala/com/exasol/spark/ReservedKeywordsIT.scala (2 changes: 1 addition & 1 deletion)

@@ -5,7 +5,7 @@ import com.holdenkarau.spark.testing.DataFrameSuiteBase
 /**
  * Tests for querying Exasol tables with reserved keywords.
  */
-class ReservedKeywordsSuite extends BaseIntegrationTest with DataFrameSuiteBase {
+class ReservedKeywordsIT extends BaseIntegrationTest with DataFrameSuiteBase {
 
   val SCHEMA: String = "RESERVED_KEYWORDS"
   val TABLE: String = "TEST_TABLE"
src/it/scala/com/exasol/spark/SaveIT.scala (2 changes: 1 addition & 1 deletion)

@@ -10,7 +10,7 @@ import org.scalatest.BeforeAndAfterEach
 /**
  * Integration tests for saving Spark DataFrames into Exasol tables.
  */
-class SaveSuite extends BaseIntegrationTest with BeforeAndAfterEach with DataFrameSuiteBase {
+class SaveIT extends BaseIntegrationTest with BeforeAndAfterEach with DataFrameSuiteBase {
 
   private[this] val tableName = s"$EXA_SCHEMA.$EXA_TABLE"
 
src/it/scala/com/exasol/spark/TypesIT.scala (2 changes: 1 addition & 1 deletion)

@@ -3,7 +3,7 @@ package com.exasol.spark
 import com.holdenkarau.spark.testing.DataFrameSuiteBase
 import org.apache.spark.sql.types._
 
-class TypesSuite extends BaseIntegrationTest with DataFrameSuiteBase {
+class TypesIT extends BaseIntegrationTest with DataFrameSuiteBase {
 
   test("converts Exasol types to Spark") {
     createAllTypesTable()
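Renaming the `src/it` suites from `*Suite` to `*IT` keeps integration tests distinguishable from unit tests by both directory and class-name suffix, which build and coverage tooling can filter on. A sketch of one common sbt wiring for such a split, shown purely to illustrate the convention; it assumes sbt's built-in `IntegrationTest` configuration, not this repository's exact build definition:

```scala
// Illustrative only: compile integration suites from src/it/scala and
// select them by the *IT class-name suffix.
lazy val root = (project in file("."))
  .configs(IntegrationTest)
  .settings(
    Defaults.itSettings,
    IntegrationTest / testOptions += Tests.Filter(_.endsWith("IT"))
  )
```

With a layout like this, `sbt test` runs only the fast unit suites (such as `TypesSuite` below), while `sbt IntegrationTest/test` starts the container-backed `*IT` suites.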
src/main/scala/com/exasol/spark/util/Types.scala (8 changes: 4 additions & 4 deletions)

@@ -119,8 +119,6 @@ object Types extends Logging {
     // Others
     case java.sql.Types.ROWID  => LongType
     case java.sql.Types.STRUCT => StringType
-    case _ =>
-      throw new IllegalArgumentException(s"Received an unsupported SQL type $sqlType")
   }
 
   /**
@@ -154,7 +152,8 @@
     case TimestampType  => java.sql.Types.TIMESTAMP
     case DateType       => java.sql.Types.DATE
     case _: DecimalType => java.sql.Types.DECIMAL
-    case _ => throw new RuntimeException(s"Unsupported Spark data type $dataType!")
+    case _ =>
+      throw new IllegalArgumentException(s"Unsupported Spark data type $dataType!")
   }
 
   /**
@@ -177,7 +176,8 @@
     case StringType    => "CLOB"
     case DateType      => "DATE"
     case TimestampType => "TIMESTAMP"
-    case _ => throw new RuntimeException(s"Unsupported Spark data type $dataType!")
+    case _ =>
+      throw new IllegalArgumentException(s"Unsupported Spark data type $dataType!")
   }
 
   /**
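Both converters now signal unsupported types with `IllegalArgumentException` rather than the blunter `RuntimeException`, so callers can handle bad arguments precisely. A quick usage sketch, assuming the converters are imported from the `Types` object as the test suite below does; the expected values are taken from those tests:

```scala
import org.apache.spark.sql.types._
import com.exasol.spark.util.Types._

exasolTypeFromSparkDataType(LongType)  // "BIGINT"
jdbcTypeFromSparkDataType(BooleanType) // java.sql.Types.BIT

// Unsupported types now raise IllegalArgumentException:
// jdbcTypeFromSparkDataType(MapType(StringType, IntegerType))
```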
src/test/scala/com/exasol/spark/util/TypesSuite.scala (54 changes: 54 additions & 0 deletions)

@@ -15,6 +15,60 @@ class TypesSuite extends AnyFunSuite with Matchers {
     assert(exasolTypeFromSparkDataType(DecimalType.apply(38, 37)) === "DECIMAL(36,36)")
   }
 
+  test("Spark types to JDBC types conversion") {
+    val data: Map[DataType, Int] = Map(
+      IntegerType -> java.sql.Types.INTEGER,
+      LongType -> java.sql.Types.BIGINT,
+      DoubleType -> java.sql.Types.DOUBLE,
+      FloatType -> java.sql.Types.FLOAT,
+      ShortType -> java.sql.Types.SMALLINT,
+      ByteType -> java.sql.Types.TINYINT,
+      BooleanType -> java.sql.Types.BIT,
+      StringType -> java.sql.Types.VARCHAR,
+      BinaryType -> java.sql.Types.BLOB,
+      TimestampType -> java.sql.Types.TIMESTAMP,
+      DateType -> java.sql.Types.DATE,
+      DecimalType(18, 0) -> java.sql.Types.DECIMAL
+    )
+    data.foreach {
+      case (given, expected) =>
+        assert(jdbcTypeFromSparkDataType(given) === expected)
+    }
+  }
+
+  test("Spark types to JDBC types conversion throws for unsupported type") {
+    val thrown = intercept[IllegalArgumentException] {
+      jdbcTypeFromSparkDataType(MapType(StringType, IntegerType))
+    }
+    assert(thrown.getMessage().contains("Unsupported Spark data type"))
+  }
+
+  test("Spark types to Exasol types conversion") {
+    val data: Map[DataType, String] = Map(
+      ShortType -> "SMALLINT",
+      ByteType -> "TINYINT",
+      IntegerType -> "INTEGER",
+      LongType -> "BIGINT",
+      DoubleType -> "DOUBLE",
+      FloatType -> "FLOAT",
+      BooleanType -> "BOOLEAN",
+      StringType -> "CLOB",
+      DateType -> "DATE",
+      TimestampType -> "TIMESTAMP"
+    )
+    data.foreach {
+      case (given, expected) =>
+        assert(exasolTypeFromSparkDataType(given) === expected)
+    }
+  }
+
+  test("Spark types to Exasol types conversion throws for unsupported type") {
+    val thrown = intercept[IllegalArgumentException] {
+      exasolTypeFromSparkDataType(ArrayType(FloatType))
+    }
+    assert(thrown.getMessage().contains("Unsupported Spark data type"))
+  }
+
   test("test of Int type conversion") {
     assert(createSparkTypeFromSQLType(java.sql.Types.TINYINT, 0, 0, false) === ShortType)
     assert(createSparkTypeFromSQLType(java.sql.Types.SMALLINT, 0, 0, false) === ShortType)
