Spark 3.4.1 and drop EOL Spark versions 3.0 and 3.1 (#430)
* Spark 3.4.0

* Drop EOL Spark versions

* Upgrade 3.2 to 3.2.4

* Align Scala version with what is used in Spark

* Use 2.12.12 in Scala CI for sbt-scoverage compatibility

* Rebuild

* Use Spark 3.4.1

* Trigger CI

* Apply suggestions from code review

Co-authored-by: Enrico Minack <github@enrico.minack.dev>

---------

Co-authored-by: Enrico Minack <github@enrico.minack.dev>
eejbyfeldt and EnricoMi committed Sep 12, 2023
1 parent a2c1dbc commit 7e2cd1c
Showing 5 changed files with 20 additions and 24 deletions.
15 changes: 6 additions & 9 deletions .github/workflows/python-ci.yml
@@ -6,17 +6,14 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - spark-version: 3.3.2
-            scala-version: 2.12.12
+          - spark-version: 3.4.1
+            scala-version: 2.12.17
             python-version: 3.9
-          - spark-version: 3.2.3
-            scala-version: 2.12.12
+          - spark-version: 3.3.3
+            scala-version: 2.12.15
             python-version: 3.9
-          - spark-version: 3.1.3
-            scala-version: 2.12.12
-            python-version: 3.9
-          - spark-version: 3.0.3
-            scala-version: 2.12.12
+          - spark-version: 3.2.4
+            scala-version: 2.12.15
             python-version: 3.9
     runs-on: ubuntu-22.04
     env:
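
For orientation, each `include` entry above becomes one CI job. A step consuming the matrix values might look like the sketch below (hypothetical step and script names; the workflow's actual steps are outside this diff, while the `${{ matrix.* }}` expressions are standard GitHub Actions syntax):

    steps:
      # Hypothetical step (illustrative only): read the matrix values
      # and hand them to a test script.
      - name: Run Python tests against the selected Spark
        run: ./dev/run-tests.sh  # illustrative script name
        env:
          SPARK_VERSION: ${{ matrix.spark-version }}
          SCALA_VERSION: ${{ matrix.scala-version }}
          PYTHON_VERSION: ${{ matrix.python-version }}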
18 changes: 9 additions & 9 deletions .github/workflows/scala-ci.yml
@@ -6,18 +6,18 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - spark-version: 3.3.2
-            scala-version: 2.13.10
-          - spark-version: 3.3.2
+          - spark-version: 3.4.1
+            scala-version: 2.13.8
+          - spark-version: 3.4.1
             scala-version: 2.12.12
-          - spark-version: 3.2.3
+          - spark-version: 3.3.3
+            scala-version: 2.13.8
+          - spark-version: 3.3.3
             scala-version: 2.12.12
-          - spark-version: 3.2.3
-            scala-version: 2.13.10
-          - spark-version: 3.1.3
-            scala-version: 2.12.12
-          - spark-version: 3.0.3
+          - spark-version: 3.2.4
             scala-version: 2.12.12
+          - spark-version: 3.2.4
+            scala-version: 2.13.5
     runs-on: ubuntu-22.04
     env:
       # define Java options for both official sbt and sbt-extras
4 changes: 2 additions & 2 deletions Dockerfile
@@ -8,8 +8,8 @@ RUN apt-get update && \
     apt-get clean
 
 # Install Spark and update env variables.
-ENV SCALA_VERSION 2.12.15
-ENV SPARK_VERSION "3.3.2"
+ENV SCALA_VERSION 2.12.17
+ENV SPARK_VERSION "3.4.1"
 ENV SPARK_BUILD "spark-${SPARK_VERSION}-bin-hadoop3.2"
 ENV SPARK_BUILD_URL "https://dist.apache.org/repos/dist/release/spark/spark-${SPARK_VERSION}/${SPARK_BUILD}.tgz"
 RUN wget --quiet "$SPARK_BUILD_URL" -O /tmp/spark.tgz && \
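
As a worked substitution with the new values (pure string expansion of the ENV variables above, shown for reference):

    SPARK_BUILD     = spark-3.4.1-bin-hadoop3.2
    SPARK_BUILD_URL = https://dist.apache.org/repos/dist/release/spark/spark-3.4.1/spark-3.4.1-bin-hadoop3.2.tgz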
5 changes: 2 additions & 3 deletions build.sbt
@@ -5,13 +5,12 @@ import ReleaseTransformations._

 resolvers += "Spark snapshot repository" at "https://repository.apache.org/snapshots/"
 
-val sparkVer = sys.props.getOrElse("spark.version", "3.3.2")
+val sparkVer = sys.props.getOrElse("spark.version", "3.4.1")
 val sparkBranch = sparkVer.substring(0, 3)
 val defaultScalaVer = sparkBranch match {
+  case "3.4" => "2.12.17"
   case "3.3" => "2.12.15"
   case "3.2" => "2.12.15"
-  case "3.1" => "2.12.15"
-  case "3.0" => "2.12.15"
   case _ => throw new IllegalArgumentException(s"Unsupported Spark version: $sparkVer.")
 }
 val scalaVer = sys.props.getOrElse("scala.version", defaultScalaVer)
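
Because build.sbt reads spark.version and scala.version from JVM system properties, a non-default pairing can be selected on the sbt command line, for example (a sketch assuming the standard test task):

    sbt -Dspark.version=3.3.3 -Dscala.version=2.13.8 test

With the 3.0 and 3.1 cases removed, any unlisted Spark branch now fails fast with the IllegalArgumentException above, which is how EOL versions are dropped at build time.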
2 changes: 1 addition & 1 deletion dev/release.py
@@ -39,7 +39,7 @@ def verify(prompt, interactive):
 @click.option("--publish-docs", type=bool, default=PUBLISH_DOCS_DEFAULT, show_default=True,
               help="Publish docs to github-pages.")
 @click.option("--spark-version", multiple=True, show_default=True,
-              default=["3.0.3", "3.1.3", "3.2.3", "3.3.2"])
+              default=["3.2.4", "3.3.3", "3.4.1"])
 def main(release_version, next_version, publish_to, no_prompt, git_remote, publish_docs,
          spark_version):
     interactive = not no_prompt
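
Since the option is declared with multiple=True, click collects repeated --spark-version flags into a tuple, so a release against a subset of the new defaults would be invoked along these lines (illustrative; other options elided):

    python dev/release.py --spark-version 3.3.3 --spark-version 3.4.1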
