Skip to content
Permalink
Browse files

Make build windows compatible

  • Loading branch information...
mhamilton723 committed May 22, 2019
1 parent 94e9b21 commit 2b75b62b8bd50239564ff5d1f50a94b003881bd2
Showing with 377 additions and 1,964 deletions.
  1. +1 −0 .gitignore
  2. +45 −21 build.sbt
  3. +0 −300 notebooks/gpu/DeepLearning - Distributed CNTK training.ipynb
  4. +0 −95 notebooks/tests/BasicDFOpsSmokeTest.ipynb
  5. +6 −0 pipeline.yaml
  6. +25 −0 project/Secrets.scala
  7. +9 −0 project/UnzipUtils.scala
  8. +5 −1 project/build.sbt
  9. +3 −2 src/it/scala/com/microsoft/ml/{spark/core/test/fuzzing → nbtest}/DatabricksUtilities.scala
  10. +3 −2 src/it/scala/com/microsoft/ml/{spark/core/test/fuzzing → nbtest}/NotebookTests.scala
  11. +1 −1 src/it/scala/com/microsoft/ml/{spark/core/test/fuzzing → nbtest}/SprayUtilities.scala
  12. 0 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/CodeGen.scala
  13. +2 −1 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/CodegenConfig.scala
  14. +1 −2 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/DocGen.scala
  15. +7 −7 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/PySparkWrapper.scala
  16. +4 −5 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/PySparkWrapperTest.scala
  17. +6 −7 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/SparklyRWrapper.scala
  18. +1 −4 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/WrapperClassDoc.scala
  19. +0 −4 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/WrapperGenerator.scala
  20. +1 −1 src/{codegen → it}/scala/com/microsoft/ml/spark/codegen/WritableWrapper.scala
  21. +0 −26 src/main/python/mmlspark/cntk/train/CNTKLearner.py
  22. 0 src/main/python/mmlspark/cntk/train/__init__.py
  23. +0 −151 src/main/scala/com/microsoft/ml/spark/cntk/train/BrainscriptBuilder.scala
  24. +0 −237 src/main/scala/com/microsoft/ml/spark/cntk/train/CNTKLearner.scala
  25. +0 −270 src/main/scala/com/microsoft/ml/spark/cntk/train/CommandBuilders.scala
  26. +0 −263 src/main/scala/com/microsoft/ml/spark/cntk/train/DataConversion.scala
  27. +0 −41 src/main/scala/com/microsoft/ml/spark/cntk/train/TypeMapping.scala
  28. +0 −2 src/main/scala/com/microsoft/ml/spark/cntk/train/_CNTKLearner.txt
  29. +77 −0 src/main/scala/com/microsoft/ml/spark/cognitive/RESTHelpers.scala
  30. +9 −3 src/main/scala/com/microsoft/ml/spark/core/env/FileUtilities.scala
  31. +4 −0 src/test/scala/com/microsoft/ml/spark/Secrets.scala
  32. +2 −1 src/test/scala/com/microsoft/ml/spark/automl/VerifyFindBestModel.scala
  33. +0 −1 src/test/scala/com/microsoft/ml/spark/cntk/CNTKBindingSuite.scala
  34. +7 −4 src/test/scala/com/microsoft/ml/spark/cntk/CNTKTestUtils.scala
  35. +0 −290 src/test/scala/com/microsoft/ml/spark/cntk/train/ValidateCntkTrain.scala
  36. +0 −30 src/test/scala/com/microsoft/ml/spark/cntk/train/ValidateConfiguration.scala
  37. +0 −106 src/test/scala/com/microsoft/ml/spark/cntk/train/ValidateDataConversion.scala
  38. +0 −17 src/test/scala/com/microsoft/ml/spark/cntk/train/ValidateEnvironmentUtils.scala
  39. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/AnamolyDetectionSuite.scala
  40. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/ComputerVisionSuite.scala
  41. +3 −5 src/{main → test}/scala/com/microsoft/ml/spark/cognitive/FaceAPI.scala
  42. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/FaceSuite.scala
  43. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/ImageSearchSuite.scala
  44. +3 −1 src/test/scala/com/microsoft/ml/spark/cognitive/SearchWriterSuite.scala
  45. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/SpeechSuite.scala
  46. +2 −1 src/test/scala/com/microsoft/ml/spark/cognitive/TextAnalyticsSuite.scala
  47. +3 −0 src/test/scala/com/microsoft/ml/spark/core/serialize/ValidateComplexParamSerializer.scala
  48. +1 −0 src/test/scala/com/microsoft/ml/spark/core/test/base/TestBase.scala
  49. +8 −6 src/test/scala/com/microsoft/ml/spark/core/test/benchmarks/Benchmarks.scala
  50. +4 −2 src/test/scala/com/microsoft/ml/spark/downloader/DownloaderSuite.scala
  51. +2 −3 src/test/scala/com/microsoft/ml/spark/featurize/VerifyFeaturize.scala
  52. +8 −5 src/test/scala/com/microsoft/ml/spark/image/ImageFeaturizerSuite.scala
  53. +2 −1 src/test/scala/com/microsoft/ml/spark/image/ImageSetAugmenterSuite.scala
  54. +8 −5 src/test/scala/com/microsoft/ml/spark/io/binary/BinaryFileReaderSuite.scala
  55. +1 −1 src/test/scala/com/microsoft/ml/spark/io/http/ContinuousHTTPSuite.scala
  56. +4 −3 src/test/scala/com/microsoft/ml/spark/io/http/DistributedHTTPSuite.scala
  57. +2 −1 src/test/scala/com/microsoft/ml/spark/io/http/HTTPSuite.scala
  58. +1 −2 src/test/scala/com/microsoft/ml/spark/io/http/HTTPv2Suite.scala
  59. +4 −2 src/test/scala/com/microsoft/ml/spark/io/image/ImageReaderSuite.scala
  60. +1 −1 src/test/scala/com/microsoft/ml/spark/io/powerbi/PowerBiSuite.scala
  61. +29 −1 src/test/scala/com/microsoft/ml/spark/lightgbm/VerifyLightGBMClassifier.scala
  62. +12 −1 src/test/scala/com/microsoft/ml/spark/lightgbm/VerifyLightGBMRanker.scala
  63. +17 −1 src/test/scala/com/microsoft/ml/spark/lightgbm/VerifyLightGBMRegressor.scala
  64. +19 −13 src/test/scala/com/microsoft/ml/spark/opencv/ImageTransformerSuite.scala
  65. +8 −3 src/test/scala/com/microsoft/ml/spark/train/VerifyComputeModelStatistics.scala
  66. +6 −6 src/test/scala/com/microsoft/ml/spark/train/VerifyTrainRegressor.scala
@@ -18,6 +18,7 @@
*.exe
*.pyc
*.pyo
*.iml

# Generic editors
.vscode
@@ -1,3 +1,6 @@
import java.io.File
import java.net.URL
import org.apache.commons.io.FileUtils
import scala.sys.process.Process

// Artifact/module name for the sbt build.
name := "mmlspark"
@@ -23,24 +26,6 @@ libraryDependencies ++= Seq(
"com.microsoft.ml.lightgbm" % "lightgbmlib" % "2.2.350"
)

// Two custom sbt configurations, both inheriting the Test classpath:
// "it" for integration tests and "codegen" for wrapper generation.
// NOTE(review): this span is from the removed side of the diff; the commit
// replaces these with the stock IntegrationTest configuration (see below).
lazy val IntegrationTest2 = config("it").extend(Test)

lazy val CodeGen = config("codegen").extend(Test)

// Shared project settings: scalastyle config for tests, BuildInfo generation
// (name/version/scalaVersion/sbtVersion/baseDirectory exposed to the code as
// com.microsoft.ml.spark.build), plus test settings for the two custom
// configurations. NOTE(review): removed side of the diff — superseded by the
// new `settings` definition later in this file.
val settings = Seq(
(scalastyleConfig in Test) := baseDirectory.value / "scalastyle-test-config.xml",
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion, baseDirectory),
buildInfoPackage := "com.microsoft.ml.spark.build") ++
inConfig(IntegrationTest2)(Defaults.testSettings) ++
inConfig(CodeGen)(Defaults.testSettings)

// Root project wired up with the custom configurations and the BuildInfo and
// ScalaUnidoc plugins. NOTE(review): removed side of the diff — superseded by
// the new project definition later in this file.
lazy val mmlspark = (project in file("."))
.configs(IntegrationTest2)
.configs(CodeGen)
.enablePlugins(BuildInfoPlugin)
.enablePlugins(ScalaUnidocPlugin)
.settings(settings: _*)

/** Joins path segments into a single [[File]], e.g. join("a", "b") is "a/b".
  *
  * Platform-independent alternative to concatenating with "/" (the point of
  * this Windows-compatibility change): each segment becomes a child of the
  * previous one via the File(parent, child) constructor.
  *
  * @param folders one or more path segments; must be non-empty
  * @return the segments joined as a File
  * @throws IllegalArgumentException if no segments are given (clearer than the
  *         UnsupportedOperationException the previous head/tail version threw)
  */
def join(folders: String*): File = {
  require(folders.nonEmpty, "join requires at least one path segment")
  // drop(1) instead of the partial op tail; fold the rest onto the first.
  folders.drop(1).foldLeft(new File(folders.head)) { (dir, segment) => new File(dir, segment) }
}
@@ -53,21 +38,21 @@ val pythonTestDir = join(genDir.toString, "test", "python")

// Builds the Python wheel: runs the code-generation task, then invokes
// `python setup.py bdist_wheel` in the generated Python source directory with
// MML_VERSION exported so setup.py can stamp the package version.
packagePythonTask := {
val s: TaskStreams = streams.value
// NOTE(review): both run-task lines below appear because the diff markers were
// stripped from this scrape — one is the removed (CodeGen) and one the added
// (IntegrationTest) line; confirm against the repository which one is current.
(run in CodeGen).toTask("").value
(run in IntegrationTest).toTask("").value
Process(
s"python setup.py bdist_wheel --universal -d ${pythonPackageDir.absolutePath}",
pythonSrcDir,
"MML_VERSION" -> version.value) ! s.log
}

// NOTE(review): the two TaskKey lines below are the removed/added pair of a
// diff with markers stripped (the description changed from "test python sdk"
// to "install python sdk"); only one belongs in the real file.
val installPipPackageTask = TaskKey[Unit]("installPipPackage", "test python sdk")
val installPipPackageTask = TaskKey[Unit]("installPipPackage", "install python sdk")

// Publishes the Scala artifacts locally, builds the wheel, then pip-installs
// it from the package directory via `python -m wheel install --force`.
installPipPackageTask := {
val s: TaskStreams = streams.value
publishLocal.value
packagePythonTask.value
Process(
// NOTE(review): the two Seq lines are again a removed/added pair (only the
// spacing after commas differs); the commands are otherwise identical.
Seq("python", "-m","wheel","install", s"mmlspark-${version.value}-py2.py3-none-any.whl", "--force"),
Seq("python", "-m", "wheel", "install", s"mmlspark-${version.value}-py2.py3-none-any.whl", "--force"),
pythonPackageDir) ! s.log
}

@@ -80,3 +65,42 @@ testPythonTask := {
Seq("python", "tools2/run_all_tests.py"),
new File(".")) ! s.log
}

// Task and settings for fetching the shared test-dataset archive from blob
// storage. datasetDir points at the directory the archive is extracted into,
// named after the archive with its extension stripped.
val getDatasetsTask = TaskKey[Unit]("getDatasets", "download datasets used for testing")
val datasetName = "datasets-2019-05-02.tgz"
val datasetUrl = new URL(s"https://mmlspark.blob.core.windows.net/installers/$datasetName")
val datasetDir = settingKey[File]("The directory that holds the dataset")
datasetDir := {
  // split('.') uses the Char overload (not the regex String overload), so the
  // dot is literal; this replaces the convoluted split(".".toCharArray.head).
  join(target.value.toString, "scala-2.11", "datasets", datasetName.split('.').head)
}

// Downloads and extracts the test-dataset archive, skipping work already done:
// the download directory is created only when missing, and the archive is
// fetched and unpacked only when it is not already present on disk.
getDatasetsTask := {
  val downloadDir = datasetDir.value.getParentFile
  val archiveFile = new File(downloadDir, datasetName)
  if (!downloadDir.exists()) downloadDir.mkdirs()
  if (!archiveFile.exists()) {
    FileUtils.copyURLToFile(datasetUrl, archiveFile)
    UnzipUtils.unzip(archiveFile, downloadDir)
  }
}

// One-shot developer task: compile both Test and Compile configurations and
// download the test datasets, so an IDE (IntelliJ) import has everything.
val setupTask = TaskKey[Unit]("setup", "set up library for intellij")
setupTask := {
// NOTE(review): sbt evaluates .value dependencies of a task in an unspecified
// (possibly parallel) order, so the ordering below is not a sequencing
// guarantee — confirm none of these steps depends on another having finished.
(Test / compile).toTask.value
(Compile / compile).toTask.value
getDatasetsTask.value
}

// New shared settings (added side of the diff): scalastyle config for tests,
// BuildInfo keys now including datasetDir, serialized test execution, and the
// stock sbt IntegrationTest settings instead of the custom configurations.
val settings = Seq(
(scalastyleConfig in Test) := baseDirectory.value / "scalastyle-test-config.xml",
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion, baseDirectory, datasetDir),
parallelExecution in Test := false,
buildInfoPackage := "com.microsoft.ml.spark.build") ++
Defaults.itSettings

// New root project definition (added side of the diff): uses the stock
// IntegrationTest configuration in place of the custom IntegrationTest2 and
// CodeGen configurations, with the same BuildInfo and ScalaUnidoc plugins.
lazy val mmlspark = (project in file("."))
.configs(IntegrationTest)
.enablePlugins(BuildInfoPlugin)
.enablePlugins(ScalaUnidocPlugin)
.settings(settings: _*)

0 comments on commit 2b75b62

Please sign in to comment.
You can’t perform that action at this time.