Merge branch 'main' into feature/use-default-jdbc-port-when-single-node
morazow committed Jun 15, 2021
2 parents a824ba2 + b280ab3 commit c176f0b
Showing 46 changed files with 1,400 additions and 1,258 deletions.
11 changes: 5 additions & 6 deletions .travis.yml
@@ -16,18 +16,17 @@ scala:
- 2.12.12

env:
- SPARK_VERSION="2.4.5" EXASOL_DOCKER_VERSION="6.2.12-d1"
- SPARK_VERSION="2.4.5" EXASOL_DOCKER_VERSION="7.0.4"
- SPARK_VERSION="3.0.1" EXASOL_DOCKER_VERSION="6.2.12-d1"
- SPARK_VERSION="3.0.1" EXASOL_DOCKER_VERSION="7.0.4"
- SPARK_VERSION="2.4.5" EXASOL_DOCKER_VERSION="6.2.15-d1"
- SPARK_VERSION="2.4.5" EXASOL_DOCKER_VERSION="7.0.10"
- SPARK_VERSION="3.0.1" EXASOL_DOCKER_VERSION="6.2.15-d1"
- SPARK_VERSION="3.0.1" EXASOL_DOCKER_VERSION="7.0.10"

before_install:
- git fetch --tags
- docker pull "exasol/docker-db:$EXASOL_DOCKER_VERSION"
- docker network create -d bridge --subnet 192.168.0.0/24 --gateway 192.168.0.1 dockernet

script:
- travis_wait 30 ./scripts/ci.sh
- ./scripts/ci.sh

after_success:
- bash <(curl -s https://codecov.io/bash)
2 changes: 2 additions & 0 deletions README.md
@@ -42,6 +42,7 @@ versions of Spark, we are planning to change to the newer JVM versions.
| Dependency | Purpose | License |
|---------------------------------------------|-----------------------------------------------------------------|----------------------|
| [Exasol JDBC][exasol-jdbc-link] | Accessing Exasol using JDBC and sub-connections | MIT License |
| [Exasol SQL Statement Builder][sql-ssb-link] | Building SQL statements safely without string concatenation | MIT License |
| [Spark Core][spark] | Apache Spark core libraries for optimized computation | Apache License 2.0 |
| [Spark SQL][spark-sql-link] | Apache Spark higher-level SQL and Dataframe interface libraries | Apache License 2.0 |

@@ -88,6 +89,7 @@ These plugins help with project development.
[exasol]: https://www.exasol.com/en/
[spark]: https://spark.apache.org/
[exasol-jdbc-link]: https://www.exasol.com/portal/display/DOWNLOAD/Exasol+Download+Section
[sql-ssb-link]: https://github.com/exasol/sql-statement-builder
[spark-sql-link]: https://spark.apache.org/sql/
[scalatest-link]: http://www.scalatest.org/
[scalatestplus-link]: https://github.com/scalatest/scalatestplus-mockito
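The README table above adds the SQL Statement Builder as a runtime dependency of the connector. For orientation, here is a minimal, illustrative sketch of how the connector itself is typically used from Spark. The `exasol` data source name and the `host`/`port`/`username`/`password`/`query` options follow the project's documentation; the address and credentials below are placeholders, and this is a sketch rather than the project's canonical example.

```scala
import org.apache.spark.sql.SparkSession

object ExasolReadExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("exasol-read-example").getOrCreate()

    // Placeholder connection values; replace with your own Exasol cluster settings.
    val df = spark.read
      .format("exasol")
      .option("host", "10.0.0.11")
      .option("port", "8563")
      .option("username", "sys")
      .option("password", "exasol")
      .option("query", "SELECT * FROM MY_SCHEMA.MY_TABLE")
      .load()

    df.show()
    spark.stop()
  }
}
```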
1 change: 1 addition & 0 deletions doc/changes/changelog.md
@@ -1,5 +1,6 @@
# Releases

* [1.1.0](changes_1.1.0.md)
* [1.0.0](changes_1.0.0.md)
* [0.3.2](changes_0.3.2.md)
* [0.3.1](changes_0.3.1.md)
52 changes: 37 additions & 15 deletions doc/changes/changes_1.0.0.md
@@ -1,33 +1,55 @@
# Spark Exasol Connector 1.0.0, released 2020-12-DD
# Spark Exasol Connector 1.0.0, released 2021-03-09

Code: Fixed bugs related to identifiers

## Summary

In this release, we fixed bugs related to quoted identifiers and reserved
keywords. In addition, we refactored the integration tests and reworked SQL
generation to use the Exasol SQL Statement Builder.

## Features / Improvements

## Bug Fixes

* #14: Fixed issue with using Exasol reserved keywords in Spark queries (PR #88).
* #39: Fixed issue related to quoted columns in Spark queries (PR #88).

## Refactoring

* #40: Added Exasol Testcontainers, refactored test environment (PR #87).
* #84: Added Exasol SQL Statement Builder for building SQL queries (PR #88).
* #89: Added missing Exasol predicates (PR #91).

## Documentation

* #85: Updated documentation with configuration for the Databricks cluster (PR #86)
* #85: Updated documentation with configuration for the Databricks cluster (PR #86).

## Dependency Updates

### Runtime Dependency Updates

* Updated to `com.exasol:exasol-jdbc:7.0.4` (was `7.0.0`)
* Updated to `org.apache.spark:spark-core:3.0.1` (was `2.4.5`)
* Updated to `org.apache.spark:spark-sql:3.0.1` (was `2.4.5`)
* Added `com.exasol:sql-statement-builder:4.4.0`
* Updated `com.exasol:exasol-jdbc:7.0.0` to `7.0.7`
* Updated `org.apache.spark:spark-core:2.4.5` to `3.0.1`
* Updated `org.apache.spark:spark-sql:2.4.5` to `3.0.1`

### Test Dependency Updates

* Updated to `org.scalatest:scalatest:3.2.2` (was `3.2.2`)
* Updated to `org.testcontainers:jdbc:1.15.0` (was `1.14.3`)
* Updated to `com.holdenkarau:spark-testing-base:3.0.1_1.0.0` (was `2.4.5_0.14.0`)
* Updated to `org.mockito:mockito-core:3.6.28` (was `3.5.13`)
* Updated to `com.dimafeng:testcontainers-scala:0.38.7` (was `0.38.4`)
* Added `com.exasol:exasol-testcontainers:3.5.1`
* Added `com.exasol:test-db-builder-java:3.0.0`
* Added `com.exasol:hamcrest-resultset-matcher:1.4.0`
* Removed `org.testcontainers:jdbc`
* Removed `com.dimafeng:testcontainers-scala`
* Updated `org.scalatest:scalatest:3.2.2` to `3.2.5`
* Updated `org.mockito:mockito-core:3.5.13` to `3.8.0`
* Updated `com.holdenkarau:spark-testing-base:2.4.5_0.14.0` to `3.0.1_1.0.0`

### Plugin Updates

* Updated to `sbt.version:1.4.4` (was `1.3.13`)
* Updated to `org.wartremover:sbt-wartremover:2.4.13` (was `2.4.10`)
* Updated to `org.wartremover:sbt-wartremover-contrib:1.3.11` (was `1.3.8`)
* Updated to `com.jsuereth:sbt-pgp:2.0.2` (was `2.0.1`)
* Updated to `org.xerial.sbt:sbt-sonatype:3.9.5` (was `3.9.4`)
* Updated `sbt.version:1.3.13` to `1.4.7`
* Updated `org.wartremover:sbt-wartremover:2.4.10` to `2.4.13`
* Updated `org.wartremover:sbt-wartremover-contrib:1.3.8` to `1.3.11`
* Updated `com.jsuereth:sbt-pgp:2.0.1` to `2.1.1`
* Updated `org.xerial.sbt:sbt-sonatype:3.9.4` to `3.9.5`
* Removed `io.get-coursier:sbt-coursier`
34 changes: 34 additions & 0 deletions doc/changes/changes_1.1.0.md
@@ -0,0 +1,34 @@
# Spark Exasol Connector 1.1.0, released 2021-06-04

Code: Added Support for Java 8 Runtime

## Summary

This release adds support for Java 8 runtime environments. We replaced `sql-statement-builder`, which targets Java 11, with the `sql-statement-builder-java8` variant.

## Features

* #92: Added support for Java 8.

## Dependency Updates

### Runtime Dependency Updates

* Removed `com.exasol:sql-statement-builder:4.4.0`
* Added `com.exasol:sql-statement-builder-java8:4.4.1`

### Test Dependency Updates

* Updated `com.exasol:exasol-testcontainers:3.5.1` to `3.5.3`
* Updated `com.exasol:test-db-builder-java:3.1.0` to `3.1.1`
* Updated `org.scalatest:scalatest:3.2.5` to `3.2.9`
* Updated `org.mockito:mockito-core:3.8.0` to `3.11.0`

### Plugin Updates

* Updated `org.wartremover:sbt-wartremover:2.4.13` to `2.4.15`
* Updated `org.wartremover:sbt-wartremover-contrib:1.3.11` to `1.3.12`
* Updated `org.scoverage:sbt-scoverage:1.6.1` to `1.8.2`
* Updated `com.timushev.sbt:sbt-updates:0.5.1` to `0.5.3`
* Updated `org.xerial.sbt:sbt-sonatype:3.9.5` to `3.9.7`
* Updated `com.typesafe.sbt:sbt-git:1.0.0` to `1.0.1`
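For downstream builds that consumed the statement builder directly, the swap described in this changelog amounts to the following sbt change. This is an illustrative sketch using the coordinates listed above, not a prescribed configuration.

```scala
// build.sbt (illustrative): replace the Java 11 artifact with the Java 8 variant.
libraryDependencies -= "com.exasol" % "sql-statement-builder" % "4.4.0"
libraryDependencies += "com.exasol" % "sql-statement-builder-java8" % "4.4.1"
```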
16 changes: 3 additions & 13 deletions doc/development/developer_guide.md
@@ -5,20 +5,10 @@ Please read the general [developer guide for the Scala projects][dev-guide].
## Integration Tests

The integration tests are run using [Docker][docker] containers. The tests use
[exasol/docker-db][exa-docker-db], [testcontainers][testcontainers] and
[exasol-testcontainers][exa-testcontainers] and
[spark-testing-base][spark-testing-base].

To run integration tests, a separate docker network should be created first:

```bash
docker network create -d bridge --subnet 192.168.0.0/24 --gateway 192.168.0.1 dockernet
```

The docker network is required since we connect to the Exasol docker container
using an internal IPv4 address.

[dev-guide]: https://github.com/exasol/import-export-udf-common-scala/blob/master/doc/development/developer_guide.md
[docker]: https://www.docker.com/
[exa-docker-db]: https://hub.docker.com/r/exasol/docker-db/
[testcontainers]: https://www.testcontainers.org/
[exa-testcontainers]: https://github.com/exasol/exasol-testcontainers/
[spark-testing-base]: https://github.com/holdenk/spark-testing-base
[dev-guide]: https://github.com/exasol/import-export-udf-common-scala/blob/master/doc/development/developer_guide.md
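As a rough illustration of the test setup described above, the sketch below starts an Exasol docker container with `exasol-testcontainers` and runs a trivial query over its JDBC connection. The class name and query are hypothetical, the docker image tag is taken from `.travis.yml`, and the project's real integration tests are organized differently.

```scala
import com.exasol.containers.ExasolContainer
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical example: start a dockerized Exasol instance and verify connectivity.
class ExasolContainerSpec extends AnyFunSuite {
  test("runs a query against a dockerized Exasol instance") {
    val container = new ExasolContainer("exasol/docker-db:7.0.10")
    container.start()
    try {
      val connection = container.createConnection("")
      val result = connection.createStatement().executeQuery("SELECT 1 FROM DUAL")
      assert(result.next())
      assert(result.getInt(1) === 1)
    } finally {
      container.stop()
    }
  }
}
```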
20 changes: 12 additions & 8 deletions project/Dependencies.scala
@@ -7,13 +7,15 @@ object Dependencies {

// Versions
private val DefaultSparkVersion = "3.0.1"
private val ExasolJdbcVersion = "7.0.4"
private val ExasolJdbcVersion = "7.0.7"
private val ExasolSQLStmtBuilderVersion = "4.4.1"

private val ScalaTestVersion = "3.2.3"
private val ScalaTestVersion = "3.2.9"
private val ScalaTestMockitoVersion = "1.0.0-M2"
private val MockitoVersion = "3.6.28"
private val ContainersJdbcVersion = "1.15.0"
private val ContainersScalaVersion = "0.38.7"
private val MockitoVersion = "3.11.0"
private val ExasolTestContainersVersion = "3.5.3"
private val ExasolTestDBBuilderVersion = "3.1.1"
private val ExasolHamcrestMatcherVersion = "1.4.0"

private val sparkCurrentVersion =
sys.env.getOrElse("SPARK_VERSION", DefaultSparkVersion)
@@ -27,6 +29,7 @@ object Dependencies {
/** Core dependencies needed for connector */
private val CoreDependencies: Seq[ModuleID] = Seq(
"com.exasol" % "exasol-jdbc" % ExasolJdbcVersion,
"com.exasol" % "sql-statement-builder-java8" % ExasolSQLStmtBuilderVersion,
"org.apache.spark" %% "spark-core" % sparkCurrentVersion % "provided",
"org.apache.spark" %% "spark-sql" % sparkCurrentVersion % "provided"
)
@@ -36,9 +39,10 @@
"org.scalatest" %% "scalatest" % ScalaTestVersion,
"org.scalatestplus" %% "scalatestplus-mockito" % ScalaTestMockitoVersion,
"org.mockito" % "mockito-core" % MockitoVersion,
"org.testcontainers" % "jdbc" % ContainersJdbcVersion,
"com.dimafeng" %% "testcontainers-scala" % ContainersScalaVersion,
"com.holdenkarau" %% "spark-testing-base" % SparkTestingBaseVersion
"com.holdenkarau" %% "spark-testing-base" % SparkTestingBaseVersion,
"com.exasol" % "exasol-testcontainers" % ExasolTestContainersVersion,
"com.exasol" % "test-db-builder-java" % ExasolTestDBBuilderVersion,
"com.exasol" % "hamcrest-resultset-matcher" % ExasolHamcrestMatcherVersion,
).map(_ % Test)

/** The list of all dependencies for the connector */
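The `sparkCurrentVersion` lookup above is what lets the CI matrix in `.travis.yml` build against several Spark versions. A standalone sketch of the same lookup pattern, with illustrative values:

```scala
object SparkVersionSelection {
  private val DefaultSparkVersion = "3.0.1"

  // Falls back to the default when the SPARK_VERSION environment variable is not set,
  // mirroring the getOrElse lookup in Dependencies.scala.
  val sparkVersion: String = sys.env.getOrElse("SPARK_VERSION", DefaultSparkVersion)

  def main(args: Array[String]): Unit =
    println(s"Building against Spark $sparkVersion")
}
```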
1 change: 0 additions & 1 deletion project/Settings.scala
@@ -43,7 +43,6 @@ object Settings {
// ScalaFmt settings
scalafmtOnCompile := true,
// Scoverage settings
coverageMinimum := 50,
coverageOutputHTML := true,
coverageOutputXML := true,
coverageFailOnMinimum := false,
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
sbt.version=1.4.4
sbt.version=1.4.7
14 changes: 7 additions & 7 deletions project/plugins.sbt
@@ -1,10 +1,10 @@
// Adds `wartremover`, a flexible Scala code linting tool
// http://github.com/puffnfresh/wartremover
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.13")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.15")

// Adds Contrib Warts
// http://github.com/wartremover/wartremover-contrib/
addSbtPlugin("org.wartremover" % "sbt-wartremover-contrib" % "1.3.11")
addSbtPlugin("org.wartremover" % "sbt-wartremover-contrib" % "1.3.12")

// Adds an `assembly` task to create a fat JAR with all of its dependencies
// https://github.com/sbt/sbt-assembly
@@ -16,7 +16,7 @@ addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.0")

// Adds Scala Code Coverage (Scoverage) used during unit tests
// http://github.com/scoverage/sbt-scoverage
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2")

// Adds SBT Coveralls plugin for uploading Scala code coverage to
// https://coveralls.io
@@ -25,7 +25,7 @@ addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.2.7")

// Adds a `dependencyUpdates` task to check Maven repositories for dependency updates
// http://github.com/rtimush/sbt-updates
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.1")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.3")

// Adds a `scalafmt` task for automatic source code formatting
// https://github.com/lucidsoftware/neo-sbt-scalafmt
@@ -41,15 +41,15 @@ addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2")

// Adds `sonatype` release tasks
// https://github.com/xerial/sbt-sonatype
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.5")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.7")

// Adds a `gnu-pgp` plugin
// https://github.com/sbt/sbt-pgp
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.2")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1")

// Adds a `git` plugin
// https://github.com/sbt/sbt-git
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.1")

// Set up this and project/project/plugins.sbt for formatting project/*.scala files with scalafmt
inThisBuild(
2 changes: 1 addition & 1 deletion project/project/plugins.sbt
@@ -8,4 +8,4 @@ addSbtPlugin("com.lucidchart" % "sbt-scalafmt-coursier" % "1.16")

// Used to get updates for plugins
// see https://github.com/rtimush/sbt-updates/issues/10
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.0")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.3")
17 changes: 12 additions & 5 deletions sbtx
@@ -34,8 +34,8 @@

set -o pipefail

declare -r sbt_release_version="1.4.4"
declare -r sbt_unreleased_version="1.4.4"
declare -r sbt_release_version="1.4.7"
declare -r sbt_unreleased_version="1.4.7"

declare -r latest_213="2.13.4"
declare -r latest_212="2.12.12"
Expand All @@ -48,7 +48,7 @@ declare -r buildProps="project/build.properties"

declare -r sbt_launch_ivy_release_repo="https://repo.typesafe.com/typesafe/ivy-releases"
declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots"
declare -r sbt_launch_mvn_release_repo="https://repo.scala-sbt.org/scalasbt/maven-releases"
declare -r sbt_launch_mvn_release_repo="https://repo1.maven.org/maven2"
declare -r sbt_launch_mvn_snapshot_repo="https://repo.scala-sbt.org/scalasbt/maven-snapshots"

declare -r default_jvm_opts_common="-Xms512m -Xss2m -XX:MaxInlineLevel=18"
@@ -167,7 +167,7 @@ make_url() {
0.10.*) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
*) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch-${version}.jar" ;;
*) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
esac
}

@@ -247,11 +247,18 @@ java_version() {
echo "$version"
}

is_apple_silicon() { [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; }

# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+
default_jvm_opts() {
local -r v="$(java_version)"
if [[ $v -ge 10 ]]; then
echo "$default_jvm_opts_common -XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler"
if is_apple_silicon; then
# As of Dec 2020, JVM for Apple Silicon (M1) doesn't support JVMCI
echo "$default_jvm_opts_common"
else
echo "$default_jvm_opts_common -XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler"
fi
elif [[ $v -ge 8 ]]; then
echo "$default_jvm_opts_common"
else
