Skip to content

Commit

Permalink
Merge pull request #717 from lucagiovagnoli/luca-tempDir-concurrency
Browse files Browse the repository at this point in the history
Create proper temporary directories
  • Loading branch information
ancasarb committed Sep 28, 2020
2 parents c4d9253 + 6f58ce4 commit ad4ad3f
Show file tree
Hide file tree
Showing 7 changed files with 52 additions and 21 deletions.
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.combust.mleap.databricks.runtime.testkit

import java.io.File
import java.nio.file.{Files, Path}

import org.tensorflow

Expand All @@ -24,9 +25,11 @@ object TensorFlowTestUtil {
graph
}

val baseDir = new File("/tmp/mleap-tensorflow")
TensorFlowTestUtil.delete(baseDir)
baseDir.mkdirs()
// Absolute path of a unique, per-JVM scratch directory for TensorFlow
// test artifacts. Files.createTempDirectory guarantees a fresh name, so
// concurrently running test JVMs no longer race on a shared /tmp path.
val baseDir = {
  val temp: Path = Files.createTempDirectory("mleap-tensorflow")
  // File.deleteOnExit() removes only *empty* directories, so it would
  // silently leak once tests write files under `temp`. Register a
  // shutdown hook that runs this object's recursive delete helper instead.
  sys.addShutdownHook(delete(temp.toFile))
  temp.toAbsolutePath
}

def delete(file: File): Array[(String, Boolean)] = {
Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.combust.mleap.runtime.test

import java.io.File
import java.nio.file.{Files, Path}

import ml.combust.mleap.core.regression.DecisionTreeRegressionModel
import ml.combust.mleap.core.tree.{ContinuousSplit, InternalNode, LeafNode, Node}
Expand All @@ -10,9 +11,11 @@ import ml.combust.mleap.core.tree.{ContinuousSplit, InternalNode, LeafNode, Node
*/
object TestUtil {

val baseDir = new File("/tmp/mleap-runtime")
TestUtil.delete(baseDir)
baseDir.mkdirs()
// Unique per-JVM scratch directory for runtime test output; best-effort
// cleanup is requested via deleteOnExit when the JVM terminates.
val baseDir = {
  val scratch = Files.createTempDirectory("mleap-runtime")
  scratch.toFile.deleteOnExit()
  scratch.toAbsolutePath
}

def delete(file: File): Array[(String, Boolean)] = {
Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.apache.spark.ml.parity

import java.io.File
import java.nio.file.{Files, Path}

import org.apache.spark.ml.{PipelineModel, Transformer}
import org.apache.spark.sql.{DataFrame, SparkSession}
Expand All @@ -17,7 +18,6 @@ import org.apache.spark.ml.bundle.SparkBundleContext
import ml.combust.mleap.spark.SparkSupport._
import ml.combust.mleap.runtime.transformer.Pipeline
import resource._

import org.apache.spark.sql.functions.col
import org.apache.spark.sql.Row
import org.apache.spark.ml.linalg.{Vector, Vectors}
Expand Down Expand Up @@ -78,9 +78,14 @@ abstract class SparkParityBase extends FunSpec with BeforeAndAfterAll {
def serializedModel(transformer: Transformer)
(implicit context: SparkBundleContext): File = {
bundleCache.getOrElse {
new File("/tmp/mleap/spark-parity").mkdirs()
val file = new File(s"/tmp/mleap/spark-parity/${getClass.getName}.zip")
file.delete()

// Unique scratch directory for this run's serialized bundle; avoids
// concurrent test JVMs racing on a shared hard-coded /tmp path.
// NOTE(review): deleteOnExit() only removes the directory if it is
// still empty at JVM exit — the zip written below may defeat cleanup;
// confirm whether leaking the temp dir is acceptable here.
val tempDirPath = {
val temp: Path = Files.createTempDirectory("mleap-spark-parity")
temp.toFile.deleteOnExit()
temp.toAbsolutePath
}

val file = new File(s"${tempDirPath}/${getClass.getName}.zip")

for(bf <- managed(BundleFile(file))) {
transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.combust.mleap.tensorflow

import java.io.File
import java.nio.file.{Files, Path}

import org.tensorflow

Expand All @@ -24,9 +25,11 @@ object TestUtil {
graph
}

val baseDir = new File("/tmp/mleap-tensorflow")
TestUtil.delete(baseDir)
baseDir.mkdirs()
// Absolute path to a fresh, uniquely named temporary directory shared by
// the TensorFlow test helpers in this object. A unique name per JVM keeps
// parallel test runs from colliding.
val baseDir = {
  val dir: Path = Files.createTempDirectory("mleap-tensorflow")
  dir.toFile.deleteOnExit() // best-effort cleanup at JVM exit
  dir.toAbsolutePath
}

def delete(file: File): Array[(String, Boolean)] = {
Option(file.listFiles).map(_.flatMap(f => delete(f))).getOrElse(Array()) :+ (file.getPath -> file.delete)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.combust.mleap.xgboost.runtime.testing

import java.io.File
import java.nio.file.{Files, Path}

import ml.combust.bundle.BundleFile
import ml.combust.bundle.serializer.SerializationFormat
Expand All @@ -15,9 +16,13 @@ trait BundleSerializationUtils {
def serializeModelToMleapBundle(transformer: Transformer): File = {
import ml.combust.mleap.runtime.MleapSupport._

new File("/tmp/mleap/xgboost-runtime-parity").mkdirs()
val file = new File(s"/tmp/mleap/xgboost-runtime-parity/${this.getClass.getName}.zip")
file.delete()
// Fresh per-run directory so parallel xgboost test runs don't collide
// on a fixed /tmp path.
// NOTE(review): deleteOnExit() removes only *empty* directories; the
// bundle zip created below likely prevents the cleanup — confirm.
val tempDirPath = {
val temp: Path = Files.createTempDirectory("xgboost-runtime-parity")
temp.toFile.deleteOnExit()
temp.toAbsolutePath
}

// Bundle target inside the unique temp dir, named after the test class.
val file = new File(s"${tempDirPath}/${this.getClass.getName}.zip")

for(bf <- managed(BundleFile(file))) {
transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.dmlc.xgboost4j.scala.spark.mleap

import java.io.File
import java.nio.file.{Files, Path}

import ml.combust.bundle.BundleFile
import ml.combust.bundle.serializer.SerializationFormat
Expand Down Expand Up @@ -163,8 +164,13 @@ class XGBoostClassificationModelParitySpec extends FunSpec

implicit val sbc = SparkBundleContext.defaultContext.withDataset(transformer.transform(dataset))

new File("/tmp/mleap/spark-parity").mkdirs()
val file = new File(s"/tmp/mleap/spark-parity/${classOf[XGBoostClassificationModelParitySpec].getName}.zip")
// Unique scratch directory for the serialized parity bundle, replacing
// the previous shared hard-coded /tmp/mleap/spark-parity path.
val tempDirPath = {
val temp: Path = Files.createTempDirectory("mleap-spark-parity")
temp.toFile.deleteOnExit()
temp.toAbsolutePath
}

val file = new File(s"${tempDirPath}/${classOf[XGBoostClassificationModelParitySpec].getName}.zip")
// NOTE(review): this delete() is now redundant — the temp directory was
// just created, so the zip cannot pre-exist. Consider removing it.
file.delete()

for(bf <- managed(BundleFile(file))) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package ml.dmlc.xgboost4j.scala.spark.mleap

import java.io.File
import java.nio.file.{Files, Path}

import ml.combust.bundle.BundleFile
import ml.combust.bundle.serializer.SerializationFormat
Expand Down Expand Up @@ -96,9 +97,14 @@ class XGBoostRegressionModelParitySpec extends FunSpec
implicit val sbc = SparkBundleContext.defaultContext.withDataset(transformer.transform(dataset))

bundleCache.getOrElse {
new File("/tmp/mleap/spark-parity").mkdirs()
val file = new File(s"/tmp/mleap/spark-parity/${classOf[XGBoostRegressionModelParitySpec].getName}.zip")
file.delete()

// Unique scratch directory for this spec's serialized bundle; a fresh
// name per JVM avoids races between concurrent test runs.
// NOTE(review): deleteOnExit() only removes empty directories — the zip
// written below may leave the temp dir behind at exit; confirm.
val tempDirPath = {
val temp: Path = Files.createTempDirectory("mleap-spark-parity")
temp.toFile.deleteOnExit()
temp.toAbsolutePath
}

// Bundle target inside the unique temp dir, named after this spec class.
val file = new File(s"${tempDirPath}/${classOf[XGBoostRegressionModelParitySpec].getName}.zip")

for(bf <- managed(BundleFile(file))) {
transformer.writeBundle.format(SerializationFormat.Json).save(bf).get
Expand Down

0 comments on commit ad4ad3f

Please sign in to comment.