upgrade scala & spark version
CircleCI tests switch to local mode

Remove the hack that changed system environment variables

AccSuite now executes reliably
tribbloid committed Oct 7, 2023
1 parent 681329c commit ee6aa8b
Showing 11 changed files with 47 additions and 53 deletions.
10 changes: 5 additions & 5 deletions .circleci/config.yml
@@ -17,22 +17,22 @@ jobs:
echo "Starting ..."
- restore_cache:
keys:
- profile-{{ checksum "./dev/profiles/apache-stable/.common.sh" }}
- profile-{{ checksum "./dev/profiles/apache-local/.common.sh" }}
- run:
name: Prepare
command: |
./dev/CI-apache-stable.sh prepare
./dev/CI-apache-local.sh prepare
- save_cache:
key: profile-{{ checksum "./dev/profiles/apache-stable/.common.sh" }}
key: profile-{{ checksum "./dev/profiles/apache-local/.common.sh" }}
paths:
- ~/.ci
- run:
name: Run
command: |
./dev/CI-apache-stable.sh
./dev/CI-apache-local.sh
workflows:
main-stable: # This is the name of the workflow, feel free to change it to better match your workflow.
main-local: # This is the name of the workflow, feel free to change it to better match your workflow.
# Inside the workflow, you define the jobs you want to run.
# For more details on extending your workflow, see the configuration docs: https://circleci.com/docs/2.0/configuration-reference/#workflows
jobs:
2 changes: 1 addition & 1 deletion dev/CI-apache-latest.sh
@@ -4,4 +4,4 @@

CRDIR="$(cd "`dirname "$0"`"; pwd)"

"$CRDIR"/CI/main.sh apache-latest ${@}
"$CRDIR"/CI/main.sh apache-latest "${@}"
7 changes: 7 additions & 0 deletions dev/CI-apache-local.sh
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

# TODO: this file is merely kept for backward compatibility

CRDIR="$(cd "`dirname "$0"`"; pwd)"

"$CRDIR"/CI/main.sh apache-local "${@}"
2 changes: 1 addition & 1 deletion dev/CI-apache-stable.sh
@@ -4,4 +4,4 @@

CRDIR="$(cd "`dirname "$0"`"; pwd)"

"$CRDIR"/CI/main.sh apache-stable ${@}
"$CRDIR"/CI/main.sh apache-stable "${@}"
4 changes: 2 additions & 2 deletions dev/profiles/apache-latest/.common.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SPARK_NAME="${SPARK:-spark-3.4.0}"
SPARK_NAME="${SPARK:-spark-3.5.0}"
SPARK_DIR_ROOT="$HOME/.ci/spark-dist"

SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13
@@ -13,4 +13,4 @@ tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"

export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"

export BUILD_PROFILES=("-PsparkVersion=3.4.0" "-PscalaVersion=2.13.10")
export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
16 changes: 16 additions & 0 deletions dev/profiles/apache-local/.common.sh
@@ -0,0 +1,16 @@
#!/usr/bin/env bash

SPARK_NAME="${SPARK:-spark-3.5.0}"
SPARK_DIR_ROOT="$HOME/.ci/spark-dist"

SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13

SPARK_URL="http://archive.apache.org/dist/spark/${SPARK_NAME}/${SPARK_DIR_NAME}.tgz"

# Download Spark
wget -N "$SPARK_URL" -P "$SPARK_DIR_ROOT"
tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"

#export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"

export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
4 changes: 2 additions & 2 deletions dev/profiles/apache-stable/.common.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

SPARK_NAME="${SPARK:-spark-3.4.0}"
SPARK_NAME="${SPARK:-spark-3.5.0}"
SPARK_DIR_ROOT="$HOME/.ci/spark-dist"

SPARK_DIR_NAME="$SPARK_NAME"-bin-hadoop3-scala2.13
@@ -13,4 +13,4 @@ tar -xzf "$SPARK_DIR_ROOT/$SPARK_DIR_NAME".tgz -C "$SPARK_DIR_ROOT"

export SPARK_HOME="$SPARK_DIR_ROOT/$SPARK_DIR_NAME"

export BUILD_PROFILES=("-PsparkVersion=3.4.0" "-PscalaVersion=2.13.10")
export BUILD_PROFILES=("-PsparkVersion=3.5.0" "-PscalaVersion=2.13.12")
4 changes: 2 additions & 2 deletions gradle.properties
@@ -2,11 +2,11 @@
scalaGroup=org.scala-lang

#scalaVersion=2.12.17
scalaVersion=2.13.10
scalaVersion=2.13.12

noUav

sparkVersion=3.4.0
sparkVersion=3.5.0

org.gradle.parallel=true
org.gradle.caching=true
@@ -1,9 +1,10 @@
package com.tribbloids.spookystuff.metrics

import com.tribbloids.spookystuff.testutils.FunSpecx
import com.tribbloids.spookystuff.testutils.{FunSpecx, TestHelper}
import org.apache.spark.util.LongAccumulator
import org.scalatest.BeforeAndAfterEach

class AccSuite extends FunSpecx {
class AccSuite extends FunSpecx with BeforeAndAfterEach {

// TODO: it is not working
// it("FromType") {
@@ -13,6 +14,10 @@ class AccSuite extends FunSpecx {
// assert(acc.value == EventTimeStats.zero)
// }

override protected def beforeEach(): Unit = {
TestHelper.TestSparkSession
}

it("Simple") {
Acc.Simple(new LongAccumulator)
}
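The AccSuite change above makes beforeEach touch TestHelper.TestSparkSession, so the session is alive before each accumulator test runs. A minimal standalone sketch of the same pattern, assuming ScalaTest's AnyFunSpec and a plain local[2] session rather than the project's FunSpecx and TestHelper; the class, app, and accumulator names are made up:

import org.apache.spark.sql.SparkSession
import org.apache.spark.util.LongAccumulator
import org.scalatest.BeforeAndAfterEach
import org.scalatest.funspec.AnyFunSpec

// Hedged sketch, not project code: eagerly acquire a shared local SparkSession
// before every test so accumulator registration never races session startup.
class EagerSessionSuite extends AnyFunSpec with BeforeAndAfterEach {

  override protected def beforeEach(): Unit = {
    // getOrCreate() is idempotent: the first call starts the session,
    // later calls simply return the existing one.
    SparkSession.builder()
      .master("local[2]")
      .appName("eager-session-sketch")
      .getOrCreate()
  }

  it("registers and drives a LongAccumulator") {
    val sc = SparkSession.builder().getOrCreate().sparkContext
    val acc = new LongAccumulator
    sc.register(acc, "demo")
    sc.parallelize(1 to 10).foreach(i => acc.add(i.toLong))
    assert(acc.sum == 55 && acc.count == 10)
  }
}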
@@ -80,21 +80,4 @@ object ConfUtils {
}
}

/**
* change UnmodifiableMap System.getenv() for tests NOT stable! Only for testing
*/
def overrideEnv(key: String, value: String): Unit = {
val field = System.getenv().getClass.getDeclaredField("m")
field.setAccessible(true)
val map = field.get(System.getenv()).asInstanceOf[java.util.Map[java.lang.String, java.lang.String]]
map.put(key, value)

Thread.sleep(500)

{
// validation
val actual = System.getenv(key)
require(value == actual, s"Set environment variable failed: expected `$value`, actual `$actual`")
}
}
}
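The removed overrideEnv reached into the JDK's unmodifiable environment map by reflection, which its own comment labelled unstable, and which module-enforcing JDKs reject without extra --add-opens flags. As a hedged sketch of the usual substitute, the lookup below prefers a JVM system property that a test can set freely; the object name and keys are illustrative, not project code:

// Hedged sketch, not project code: prefer a JVM system property over the real
// environment variable, so tests can inject values without reflectively
// mutating System.getenv().
object EnvLookupSketch {

  def envOrProp(key: String, default: String): String =
    sys.props.get(key)            // test override, set via System.setProperty
      .orElse(sys.env.get(key))   // real environment, read-only
      .getOrElse(default)

  def main(args: Array[String]): Unit = {
    System.setProperty("SPARK_MASTER", "local[2]") // what a test would do
    println(envOrProp("SPARK_MASTER", "local[*]")) // prints local[2]
  }
}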
@@ -5,6 +5,7 @@ import com.tribbloids.spookystuff.utils.lifespan.Cleanable.Lifespan
import com.tribbloids.spookystuff.utils.lifespan.LocalCleanable
import com.tribbloids.spookystuff.utils.{CommonConst, CommonUtils, ConfUtils}
import org.apache.hadoop.fs.FileUtil
import org.apache.spark.launcher.InProcessLauncher
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkException}
@@ -176,11 +177,6 @@ abstract class TestHelper extends LocalCleanable {
Math.min(n * MEMORY_PER_CORE, cap)
}

@transient lazy val envOverrides: Map[String, String] = Map(
"SPARK_SCALA_VERSION" -> CommonUtils.scalaBinaryVersion
// "SPARK_LOCAL_HOSTNAME" -> "localhost"
)

case object CoreSettings {

lazy val masterEnv: String = System.getenv("SPARK_MASTER")
@@ -195,19 +191,6 @@
masterStr
} else {

if (envOverrides.nonEmpty) {
LoggerFactory
.getLogger(this.getClass)
.warn(
"overriding system variables ... this may be unstable for some JVM"
)

envOverrides.foreach {
case (k, v) =>
ConfUtils.overrideEnv(k, v)
}
}

val masterStr =
s"local-cluster[${clusterSizeOpt.get},${numCoresPerWorkerOpt.get},${executorMemoryOpt.get}]"
println(s"initializing SparkContext in local-cluster simulation mode:" + masterStr)
@@ -230,10 +213,10 @@
base1
} else {
base1 ++ Map(
"spark.home" -> SPARK_HOME,
"spark.home" -> SPARK_HOME
// "spark.executor.memory" -> (executorMemoryOpt.get + "m"),
"spark.driver.extraClassPath" -> sys.props("java.class.path"),
"spark.executor.extraClassPath" -> sys.props("java.class.path")
// "spark.driver.extraClassPath" -> sys.props("java.class.path"),
// "spark.executor.extraClassPath" -> sys.props("java.class.path")
)
}

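For context on the master string TestHelper assembles above: local-cluster[n,cores,memoryMB] forks real worker JVMs from a Spark distribution, which is why spark.home / SPARK_HOME still has to be set, whereas plain local mode runs everything in one JVM. A small sketch under the assumption of 2 workers with 2 cores and 1024 MB each; the object and app names are invented:

import org.apache.spark.sql.SparkSession

// Hedged sketch, not project code: start Spark in local-cluster simulation
// mode with the same shape of master string the test helper builds.
object LocalClusterSketch {
  def main(args: Array[String]): Unit = {
    val (workers, coresPerWorker, memPerExecutorMB) = (2, 2, 1024)
    val master = s"local-cluster[$workers,$coresPerWorker,$memPerExecutorMB]"

    // Requires SPARK_HOME to point at an unpacked Spark distribution, because
    // local-cluster mode launches real executor processes from it.
    val spark = SparkSession.builder()
      .master(master)
      .appName("local-cluster-sketch")
      .getOrCreate()
    try {
      println(spark.sparkContext.parallelize(1 to 100).sum()) // 5050.0
    } finally {
      spark.stop()
    }
  }
}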
