diff --git a/.github/workflows/scala_tests.yml b/.github/workflows/scala_tests.yml
index 6e86aac..5bbe1dd 100644
--- a/.github/workflows/scala_tests.yml
+++ b/.github/workflows/scala_tests.yml
@@ -23,8 +23,8 @@ jobs:
         run: sbt +package
       - uses: actions/upload-artifact@v2
         with:
-          name: sparkMeasure package scala 2.11
-          path: /home/runner/work/sparkMeasure/sparkMeasure/target/scala-2.11/*.jar
+          name: sparkMeasure package scala 2.13
+          path: /home/runner/work/sparkMeasure/sparkMeasure/target/scala-2.13/*.jar
       - uses: actions/upload-artifact@v2
         with:
           name: sparkMeasure package scala 2.12
diff --git a/build.sbt b/build.sbt
index fdaeb6f..ce73743 100644
--- a/build.sbt
+++ b/build.sbt
@@ -3,7 +3,7 @@ name := "spark-measure"
 version := "0.20-SNAPSHOT"
 
 scalaVersion := "2.12.15"
-crossScalaVersions := Seq("2.12.15")
+crossScalaVersions := Seq("2.12.15", "2.13.8")
 
 licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))
diff --git a/src/main/scala/ch/cern/sparkmeasure/PushGateway.scala b/src/main/scala/ch/cern/sparkmeasure/PushGateway.scala
index c20ff5a..79c1007 100644
--- a/src/main/scala/ch/cern/sparkmeasure/PushGateway.scala
+++ b/src/main/scala/ch/cern/sparkmeasure/PushGateway.scala
@@ -147,12 +147,12 @@ case class PushGateway(serverIPnPort: String, metricsJob: String) {
       if (responseCode != 202) // 202 Accepted, 400 Bad Request
         logger.error(s"Data sent error, url: '$urlFull', response: $responseCode '$responseMessage'")
     } catch {
-      case ioe: java.io.IOException =>
-        println("java.io.IOException")
-        logger.error(s"Data sent error, url: '$urlFull', " + ioe.getMessage())
       case ste: java.net.SocketTimeoutException =>
         println("java.net.SocketTimeoutException")
         logger.error(s"Data sent error, url: '$urlFull', " + ste.getMessage())
+      case ioe: java.io.IOException =>
+        println("java.io.IOException")
+        logger.error(s"Data sent error, url: '$urlFull', " + ioe.getMessage())
     }
   }
diff --git a/src/main/scala/ch/cern/sparkmeasure/StageMetrics.scala b/src/main/scala/ch/cern/sparkmeasure/StageMetrics.scala
index b94be78..278302c 100644
--- a/src/main/scala/ch/cern/sparkmeasure/StageMetrics.scala
+++ b/src/main/scala/ch/cern/sparkmeasure/StageMetrics.scala
@@ -153,7 +153,7 @@ case class StageMetrics(sparkSession: SparkSession) {
   // into a DataFrame and register it as a view for querying with SQL
   def createStageMetricsDF(nameTempView: String = "PerfStageMetrics"): DataFrame = {
     import sparkSession.implicits._
-    val resultDF = listenerStage.stageMetricsData.toDF
+    val resultDF = listenerStage.stageMetricsData.toSeq.toDF()
     resultDF.createOrReplaceTempView(nameTempView)
     logger.warn(s"Stage metrics data refreshed into temp view $nameTempView")
     resultDF
diff --git a/src/main/scala/ch/cern/sparkmeasure/TaskMetrics.scala b/src/main/scala/ch/cern/sparkmeasure/TaskMetrics.scala
index df17631..1045fcb 100644
--- a/src/main/scala/ch/cern/sparkmeasure/TaskMetrics.scala
+++ b/src/main/scala/ch/cern/sparkmeasure/TaskMetrics.scala
@@ -127,7 +127,7 @@ case class TaskMetrics(sparkSession: SparkSession) {
   // into a DataFrame and register it as a view for querying with SQL
   def createTaskMetricsDF(nameTempView: String = "PerfTaskMetrics"): DataFrame = {
     import sparkSession.implicits._
-    val resultDF = listenerTask.taskMetricsData.toDF
+    val resultDF = listenerTask.taskMetricsData.toSeq.toDF()
     resultDF.createOrReplaceTempView(nameTempView)
     logger.warn(s"Stage metrics data refreshed into temp view $nameTempView")
     resultDF
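
A note on the PushGateway hunk: java.net.SocketTimeoutException is a subclass of java.io.IOException, so with the old ordering the IOException case matched first and the timeout-specific branch was effectively unreachable. The sketch below is illustrative only (the object and method names are hypothetical, not part of sparkMeasure) and just demonstrates why the more specific case must come first:

import java.io.IOException
import java.net.SocketTimeoutException

// Hypothetical stand-alone example, not sparkMeasure code: shows why the
// SocketTimeoutException case must precede the IOException case.
object CatchOrderSketch {
  def report(error: Throwable): String =
    try {
      throw error
    } catch {
      case ste: SocketTimeoutException => s"timeout: ${ste.getMessage}"
      case ioe: IOException            => s"io error: ${ioe.getMessage}"
    }

  def main(args: Array[String]): Unit = {
    // With the cases in this order the timeout is reported as a timeout;
    // with IOException listed first it would swallow the timeout as well.
    println(report(new SocketTimeoutException("Read timed out")))  // timeout: Read timed out
    println(report(new IOException("Connection refused")))         // io error: Connection refused
  }
}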
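
On the .toSeq.toDF() changes in StageMetrics and TaskMetrics: the listeners accumulate metrics in a mutable buffer, and in Scala 2.13 scala.Seq is an alias for immutable.Seq, so Spark's implicit conversion to a DataFrame no longer accepts the mutable collection directly; the explicit empty parentheses also avoid the empty-argument-list auto-application deprecation in 2.13. Below is a minimal sketch of the pattern under these assumptions; MetricRow and the buffer are illustrative stand-ins, not sparkMeasure classes:

import scala.collection.mutable.ListBuffer
import org.apache.spark.sql.{DataFrame, SparkSession}

// Illustrative only: MetricRow stands in for the listener's metrics case class.
case class MetricRow(stageId: Int, executorRunTime: Long)

object ToDFSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("toDF-sketch").getOrCreate()
    import spark.implicits._

    val buffer = ListBuffer(MetricRow(1, 100L), MetricRow(2, 250L))

    // `buffer.toDF` compiles on Scala 2.12, where scala.Seq is collection.Seq,
    // but not on 2.13, where scala.Seq is immutable.Seq and a mutable buffer
    // no longer satisfies the implicit conversion. Converting with .toSeq
    // first (and calling toDF with explicit parentheses) works on both.
    val df: DataFrame = buffer.toSeq.toDF()
    df.show()

    spark.stop()
  }
}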