From aaadd193c3287f8c2261892cb594ab6c8f0d6407 Mon Sep 17 00:00:00 2001
From: Luca Giovagnoli
Date: Wed, 16 Sep 2020 09:10:57 +0200
Subject: [PATCH] Create proper temporary directories

---
 .../runtime/testkit/TensorFlowTestUtil.scala       |  9 ++++++---
 .../ml/combust/mleap/runtime/test/TestUtil.scala   |  9 ++++++---
 .../apache/spark/ml/parity/SparkParityBase.scala   | 13 +++++++++----
 .../ml/combust/mleap/tensorflow/TestUtil.scala     |  9 ++++++---
 .../runtime/testing/BundleSerializationUtils.scala | 11 ++++++++---
 .../XGBoostClassificationModelParitySpec.scala     | 10 ++++++++--
 .../mleap/XGBoostRegressionModelParitySpec.scala   | 12 +++++++++---
 7 files changed, 52 insertions(+), 21 deletions(-)

diff --git a/mleap-databricks-runtime-testkit/src/main/scala/ml/combust/mleap/databricks/runtime/testkit/TensorFlowTestUtil.scala b/mleap-databricks-runtime-testkit/src/main/scala/ml/combust/mleap/databricks/runtime/testkit/TensorFlowTestUtil.scala
index 8145e03ab..ad36f488a 100644
--- a/mleap-databricks-runtime-testkit/src/main/scala/ml/combust/mleap/databricks/runtime/testkit/TensorFlowTestUtil.scala
+++ b/mleap-databricks-runtime-testkit/src/main/scala/ml/combust/mleap/databricks/runtime/testkit/TensorFlowTestUtil.scala
@@ -1,6 +1,7 @@
 package ml.combust.mleap.databricks.runtime.testkit
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import org.tensorflow
 
@@ -24,9 +25,11 @@ object TensorFlowTestUtil {
     graph
   }
 
-  val baseDir = new File("/tmp/mleap-tensorflow")
-  TensorFlowTestUtil.delete(baseDir)
-  baseDir.mkdirs()
+  val baseDir = {
+    val temp: Path = Files.createTempDirectory("mleap-tensorflow")
+    temp.toFile.deleteOnExit()
+    temp.toAbsolutePath
+  }
 
   def delete(file: File): Array[(String, Boolean)] = {
     Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
diff --git a/mleap-runtime/src/test/scala/ml/combust/mleap/runtime/test/TestUtil.scala b/mleap-runtime/src/test/scala/ml/combust/mleap/runtime/test/TestUtil.scala
index 90e67fbc4..f23c8690f 100644
--- a/mleap-runtime/src/test/scala/ml/combust/mleap/runtime/test/TestUtil.scala
+++ b/mleap-runtime/src/test/scala/ml/combust/mleap/runtime/test/TestUtil.scala
@@ -1,6 +1,7 @@
 package ml.combust.mleap.runtime.test
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import ml.combust.mleap.core.regression.DecisionTreeRegressionModel
 import ml.combust.mleap.core.tree.{ContinuousSplit, InternalNode, LeafNode, Node}
@@ -10,9 +11,11 @@ import ml.combust.mleap.core.tree.{ContinuousSplit, InternalNode, LeafNode, Node
   */
 object TestUtil {
 
-  val baseDir = new File("/tmp/mleap-runtime")
-  TestUtil.delete(baseDir)
-  baseDir.mkdirs()
+  val baseDir = {
+    val temp: Path = Files.createTempDirectory("mleap-runtime")
+    temp.toFile.deleteOnExit()
+    temp.toAbsolutePath
+  }
 
   def delete(file: File): Array[(String, Boolean)] = {
     Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
diff --git a/mleap-spark-testkit/src/main/scala/org/apache/spark/ml/parity/SparkParityBase.scala b/mleap-spark-testkit/src/main/scala/org/apache/spark/ml/parity/SparkParityBase.scala
index f21f9051d..3ca202033 100644
--- a/mleap-spark-testkit/src/main/scala/org/apache/spark/ml/parity/SparkParityBase.scala
+++ b/mleap-spark-testkit/src/main/scala/org/apache/spark/ml/parity/SparkParityBase.scala
@@ -1,6 +1,7 @@
 package org.apache.spark.ml.parity
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import org.apache.spark.ml.{PipelineModel, Transformer}
 import org.apache.spark.sql.{DataFrame, SparkSession}
@@ -17,7 +18,6 @@ import org.apache.spark.ml.bundle.SparkBundleContext
 import ml.combust.mleap.spark.SparkSupport._
 import ml.combust.mleap.runtime.transformer.Pipeline
 import resource._
-
 import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.Row
 import org.apache.spark.ml.linalg.{Vector, Vectors}
@@ -78,9 +78,14 @@ abstract class SparkParityBase extends FunSpec with BeforeAndAfterAll {
   def serializedModel(transformer: Transformer)
                      (implicit context: SparkBundleContext): File = {
     bundleCache.getOrElse {
-      new File("/tmp/mleap/spark-parity").mkdirs()
-      val file = new File(s"/tmp/mleap/spark-parity/${getClass.getName}.zip")
-      file.delete()
+
+      val tempDirPath = {
+        val temp: Path = Files.createTempDirectory("mleap-spark-parity")
+        temp.toFile.deleteOnExit()
+        temp.toAbsolutePath
+      }
+
+      val file = new File(s"${tempDirPath}/${getClass.getName}.zip")
 
       for(bf <- managed(BundleFile(file))) {
         transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
diff --git a/mleap-tensorflow/src/test/scala/ml/combust/mleap/tensorflow/TestUtil.scala b/mleap-tensorflow/src/test/scala/ml/combust/mleap/tensorflow/TestUtil.scala
index d59b582b0..52f8392c3 100644
--- a/mleap-tensorflow/src/test/scala/ml/combust/mleap/tensorflow/TestUtil.scala
+++ b/mleap-tensorflow/src/test/scala/ml/combust/mleap/tensorflow/TestUtil.scala
@@ -1,6 +1,7 @@
 package ml.combust.mleap.tensorflow
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import org.tensorflow
 
@@ -24,9 +25,11 @@ object TestUtil {
     graph
   }
 
-  val baseDir = new File("/tmp/mleap-tensorflow")
-  TestUtil.delete(baseDir)
-  baseDir.mkdirs()
+  val baseDir = {
+    val temp: Path = Files.createTempDirectory("mleap-tensorflow")
+    temp.toFile.deleteOnExit()
+    temp.toAbsolutePath
+  }
 
   def delete(file: File): Array[(String, Boolean)] = {
     Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
diff --git a/mleap-xgboost-runtime/src/test/scala/ml/combust/mleap/xgboost/runtime/testing/BundleSerializationUtils.scala b/mleap-xgboost-runtime/src/test/scala/ml/combust/mleap/xgboost/runtime/testing/BundleSerializationUtils.scala
index 2c079414e..430641a4c 100644
--- a/mleap-xgboost-runtime/src/test/scala/ml/combust/mleap/xgboost/runtime/testing/BundleSerializationUtils.scala
+++ b/mleap-xgboost-runtime/src/test/scala/ml/combust/mleap/xgboost/runtime/testing/BundleSerializationUtils.scala
@@ -1,6 +1,7 @@
 package ml.combust.mleap.xgboost.runtime.testing
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import ml.combust.bundle.BundleFile
 import ml.combust.bundle.serializer.SerializationFormat
@@ -15,9 +16,13 @@ trait BundleSerializationUtils {
   def serializeModelToMleapBundle(transformer: Transformer): File = {
     import ml.combust.mleap.runtime.MleapSupport._
 
-    new File("/tmp/mleap/xgboost-runtime-parity").mkdirs()
-    val file = new File(s"/tmp/mleap/xgboost-runtime-parity/${this.getClass.getName}.zip")
-    file.delete()
+    val tempDirPath = {
+      val temp: Path = Files.createTempDirectory("xgboost-runtime-parity")
+      temp.toFile.deleteOnExit()
+      temp.toAbsolutePath
+    }
+
+    val file = new File(s"${tempDirPath}/${this.getClass.getName}.zip")
 
     for(bf <- managed(BundleFile(file))) {
       transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
diff --git a/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostClassificationModelParitySpec.scala b/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostClassificationModelParitySpec.scala
index 4b4cf8c5a..f26e4a04b 100644
--- a/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostClassificationModelParitySpec.scala
+++ b/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostClassificationModelParitySpec.scala
@@ -1,6 +1,7 @@
 package ml.dmlc.xgboost4j.scala.spark.mleap
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import ml.combust.bundle.BundleFile
 import ml.combust.bundle.serializer.SerializationFormat
@@ -163,8 +164,13 @@ class XGBoostClassificationModelParitySpec extends FunSpec
 
     implicit val sbc = SparkBundleContext.defaultContext.withDataset(transformer.transform(dataset))
 
-    new File("/tmp/mleap/spark-parity").mkdirs()
-    val file = new File(s"/tmp/mleap/spark-parity/${classOf[XGBoostClassificationModelParitySpec].getName}.zip")
+    val tempDirPath = {
+      val temp: Path = Files.createTempDirectory("mleap-spark-parity")
+      temp.toFile.deleteOnExit()
+      temp.toAbsolutePath
+    }
+
+    val file = new File(s"${tempDirPath}/${classOf[XGBoostClassificationModelParitySpec].getName}.zip")
     file.delete()
 
     for(bf <- managed(BundleFile(file))) {
diff --git a/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostRegressionModelParitySpec.scala b/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostRegressionModelParitySpec.scala
index fcab80bea..abe37b382 100644
--- a/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostRegressionModelParitySpec.scala
+++ b/mleap-xgboost-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/mleap/XGBoostRegressionModelParitySpec.scala
@@ -1,6 +1,7 @@
 package ml.dmlc.xgboost4j.scala.spark.mleap
 
 import java.io.File
+import java.nio.file.{Files, Path}
 
 import ml.combust.bundle.BundleFile
 import ml.combust.bundle.serializer.SerializationFormat
@@ -96,9 +97,14 @@ class XGBoostRegressionModelParitySpec extends FunSpec
     implicit val sbc = SparkBundleContext.defaultContext.withDataset(transformer.transform(dataset))
 
     bundleCache.getOrElse {
-      new File("/tmp/mleap/spark-parity").mkdirs()
-      val file = new File(s"/tmp/mleap/spark-parity/${classOf[XGBoostRegressionModelParitySpec].getName}.zip")
-      file.delete()
+
+      val tempDirPath = {
+        val temp: Path = Files.createTempDirectory("mleap-spark-parity")
+        temp.toFile.deleteOnExit()
+        temp.toAbsolutePath
+      }
+
+      val file = new File(s"${tempDirPath}/${classOf[XGBoostRegressionModelParitySpec].getName}.zip")
 
       for(bf <- managed(BundleFile(file))) {
         transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
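
The patch applies the same pattern in every test helper: create a uniquely named directory under the system temporary location with Files.createTempDirectory, register it for best-effort cleanup with deleteOnExit, and build bundle paths underneath it. A minimal, self-contained sketch of that pattern follows; the object name ExampleTestUtil, the helper artifact, and the "mleap-example" prefix are illustrative only and not part of the patch.

import java.io.File
import java.nio.file.{Files, Path}

// Sketch of the temp-directory pattern used throughout the patch.
// "ExampleTestUtil", "artifact" and the "mleap-example" prefix are hypothetical names.
object ExampleTestUtil {
  // Unique directory under java.io.tmpdir, e.g. /tmp/mleap-example2874719...
  val baseDir: Path = {
    val temp: Path = Files.createTempDirectory("mleap-example")
    temp.toFile.deleteOnExit() // best-effort cleanup when the JVM exits
    temp.toAbsolutePath
  }

  // Test artifacts are written inside the temporary directory.
  def artifact(name: String): File = baseDir.resolve(name).toFile
}

Because each run gets its own uniquely named directory, repeated or concurrent test runs no longer collide on the shared, hard-coded /tmp paths that the removed code used.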