Skip to content
Permalink
Browse files

Add codecov to python tests

Fix codecov upload flakiness
  • Loading branch information...
mhamilton723 committed Jul 17, 2019
1 parent b6ba62f commit fac542e2f6f80e51d8c62b5886b5804cc7481873
@@ -31,4 +31,9 @@
.DS_Store

# Target dir
**/target/*
**/target/*

# python test coverage
.coverage
coverage.xml
htmlcov
@@ -200,7 +200,12 @@ testPythonTask := {
val s = streams.value
installPipPackageTask.value
Process(
activateCondaEnv ++ Seq("python", "tools/pytest/run_all_tests.py"),
activateCondaEnv ++ Seq("pytest",
"--cov=mmlspark",
"--junitxml=target/python-test-results.xml",
"--cov-report=xml",
"target/scala-2.11/generated/test/python/mmlspark"
),
new File("."),
"MML_VERSION" -> version.value
) ! s.log
@@ -1,16 +1,28 @@
codecov:
notify:
require_ci_to_pass: yes
require_ci_to_pass: no

coverage:
precision: 2
round: down
range: "50...100"
status:
project:
default:
default: off
scala:
flags: scala
target: auto
patch:
default:
python:
flags: python
target: auto
changes: no
patch:
default: off
changes: no

flags:
scala:
paths:
- src/main/scala
python:
paths:
- src/main/python
@@ -4,7 +4,9 @@ dependencies:
- pyspark=2.4.3
- requests
- pip:
- unittest-xml-reporting
- wheel
- sphinx
- sphinx_rtd_theme
- coverage
- pytest
- pytest-cov
@@ -83,14 +83,45 @@ jobs:
displayName: Create Anaconda environment
- bash: |
source activate mmlspark
sbt testPython
sbt coverage testPython
displayName: Test Python Code
- task: PublishTestResults@2
displayName: 'Publish Test Results **/test-reports/*.xml'
displayName: 'Publish Test Results'
inputs:
testResultsFiles: 'target/**/generated/test_results/python/*.xml'
testResultsFiles: '**/python-test-*.xml'
failTaskOnFailedTests: true
condition: succeededOrFailed()
- task: AzureCLI@1
displayName: 'Generate Codecov report'
inputs:
azureSubscription: 'Findable Incubation(ca9d21ff-2a46-4e8b-bf06-8d65242342e5)'
scriptLocation: inlineScript
inlineScript: 'sbt coverageReport'
condition: succeededOrFailed()
- task: AzureKeyVault@1
inputs:
azureSubscription: 'Findable Incubation(ca9d21ff-2a46-4e8b-bf06-8d65242342e5)'
keyVaultName: mmlspark-keys
condition: succeededOrFailed()
- bash: |
echo '##vso[task.setvariable variable=retry]true'
(bash <(curl -s https://codecov.io/bash) -t $(codecov-token) \
-f coverage.xml -F python
bash <(curl -s https://codecov.io/bash) -t $(codecov-token) \
-f target/scala-2.11/coverage-report/cobertura.xml -F scala) &&
echo '##vso[task.setvariable variable=retry]false'
displayName: Upload Coverage Report To Codecov.io
timeoutInMinutes: 5
continueOnError: True
condition: succeededOrFailed()
- bash: |
bash <(curl -s https://codecov.io/bash) -t $(codecov-token) \
-f coverage.xml -F python
bash <(curl -s https://codecov.io/bash) -t $(codecov-token) \
-f target/scala-2.11/coverage-report/cobertura.xml -F scala
displayName: Retry Upload Coverage Report To Codecov.io
timeoutInMinutes: 5
condition: eq(variables.retry, 'true')
- job: UnitTests
condition: eq(variables.runTests, 'True')
@@ -161,9 +192,17 @@ jobs:
inputs:
azureSubscription: 'Findable Incubation(ca9d21ff-2a46-4e8b-bf06-8d65242342e5)'
keyVaultName: mmlspark-keys
condition: succeededOrFailed()
- bash: |
bash <(curl -s https://codecov.io/bash)
echo '##vso[task.setvariable variable=retry]true'
bash <(curl -s https://codecov.io/bash) -t $(codecov-token) -F scala \
&& echo '##vso[task.setvariable variable=retry]false'
displayName: Upload Coverage Report To Codecov.io
timeoutInMinutes: 5
env:
CODECOV_TOKEN: $(codecov-token)
continueOnError: True
condition: succeededOrFailed()
- bash: |
bash <(curl -s https://codecov.io/bash) -t $(codecov-token) -F scala
displayName: Upload Coverage Report To Codecov.io
timeoutInMinutes: 5
condition: eq(variables.retry, 'true')
@@ -47,20 +47,14 @@ object CodeGen {
// FileUtils.forceDelete(pyDir)
}

/** Returns the immediate non-directory children of `dir`, in sorted order,
  * optionally filtered by `pred`; a null predicate accepts every file.
  *
  * Fixes over the previous version: the inner `loop` wrapper was called exactly
  * once and added nothing; the `dirs` half of the partition was never used; and
  * `File.listFiles` returns null for a missing/non-directory path, which caused
  * an NPE — that case now yields an empty array.
  *
  * @param dir  directory whose top-level files are listed
  * @param pred optional file filter (null means "accept all")
  */
private def allTopLevelFiles(dir: File, pred: (File => Boolean) = null): Array[File] = {
  val children = Option(dir.listFiles).getOrElse(Array.empty[File])
  val files = children.sorted.filterNot(_.isDirectory)
  if (pred == null) files else files.filter(pred)
}

private def makeInitFiles(packageFolder: String = ""): Unit = {
val dir = new File(new File(pySrcDir,"mmlspark"), packageFolder)
val packageString = if (packageFolder != "") packageFolder.replace("/",".") else ""
val importStrings =
allTopLevelFiles(dir, f => "^[a-zA-Z]\\w*[.]py$".r.findFirstIn(f.getName).isDefined)
.map(f => s"from mmlspark$packageString.${getBaseName(f.getName)} import *\n").mkString("")
dir.listFiles.filter(_.isFile).sorted
.map(_.getName)
.filter(name => name.endsWith(".py") && !name.startsWith("_") && !name.startsWith("test"))
.map(name => s"from mmlspark$packageString.${getBaseName(name)} import *\n").mkString("")
writeFile(new File(dir, "__init__.py"), packageHelp(importStrings))
dir.listFiles().filter(_.isDirectory).foreach(f =>
makeInitFiles(packageFolder +"/" + f.getName)
@@ -69,10 +69,8 @@ abstract class PySparkWrapperParamsTest(entryPoint: Params,

protected val unittestString =
s"""|
|import os, xmlrunner
|if __name__ == "__main__":
| result = unittest.main(testRunner=xmlrunner.XMLTestRunner(output=os.getenv("TEST_RESULTS","TestResults")),
| failfast=False, buffer=False, catchbreak=False)
| result = unittest.main()
|""".stripMargin

protected def setAndGetTemplate(paramName: String, value: String) =
@@ -275,7 +273,6 @@ abstract class PySparkWrapperParamsTest(entryPoint: Params,
/** Writes the generated PySpark wrapper test module into the package folder
  * nested under `dir` (creating the folder and its `__init__.py` on the way).
  */
def writeWrapperToFile(dir: File): Unit = {
  // Resolve the nested package directory by descending through subPackages.
  val targetDir = subPackages.foldLeft(dir)((parent, pkg) => new File(parent, pkg))
  targetDir.mkdirs()
  // Mark the folder as a Python package so the generated test is importable.
  new File(targetDir, "__init__.py").createNewFile()
  // Emit the wrapper test source produced by pysparkWrapperTestBuilder.
  writeFile(new File(targetDir, "test_" + entryPointName + ".py"), pysparkWrapperTestBuilder())
}

No changes.
@@ -2,7 +2,6 @@
import os
import pyspark
import unittest
import xmlrunner
from mmlspark.recommendation.RankingAdapter import RankingAdapter
from mmlspark.recommendation.RankingEvaluator import RankingEvaluator
from mmlspark.recommendation.RankingTrainValidationSplit import RankingTrainValidationSplit
@@ -166,5 +165,4 @@ def ignore_all_tiny(self):


if __name__ == "__main__":
result = unittest.main(testRunner=xmlrunner.XMLTestRunner(output=os.getenv("TEST_RESULTS", "TestResults")),
failfast=False, buffer=False, catchbreak=False)
result = unittest.main()
@@ -59,17 +59,17 @@ class SARSpec extends RankingTestBase with EstimatorFuzzing[SAR] {
assert(recs.count == 2)
}

val testFile: String = getClass.getResource("/demoUsage.csv.gz").getPath
val sim_count1: String = getClass.getResource("/sim_count1.csv.gz").getPath
val sim_lift1: String = getClass.getResource("/sim_lift1.csv.gz").getPath
val sim_jac1: String = getClass.getResource("/sim_jac1.csv.gz").getPath
val sim_count3: String = getClass.getResource("/sim_count3.csv.gz").getPath
val sim_lift3: String = getClass.getResource("/sim_lift3.csv.gz").getPath
val sim_jac3: String = getClass.getResource("/sim_jac3.csv.gz").getPath
val user_aff: String = getClass.getResource("/user_aff.csv.gz").getPath
val userpred_count3: String = getClass.getResource("/userpred_count3_userid_only.csv.gz").getPath
val userpred_lift3: String = getClass.getResource("/userpred_lift3_userid_only.csv.gz").getPath
val userpred_jac3: String = getClass.getResource("/userpred_jac3_userid_only.csv.gz").getPath
lazy val testFile: String = getClass.getResource("/demoUsage.csv.gz").getPath
lazy val sim_count1: String = getClass.getResource("/sim_count1.csv.gz").getPath
lazy val sim_lift1: String = getClass.getResource("/sim_lift1.csv.gz").getPath
lazy val sim_jac1: String = getClass.getResource("/sim_jac1.csv.gz").getPath
lazy val sim_count3: String = getClass.getResource("/sim_count3.csv.gz").getPath
lazy val sim_lift3: String = getClass.getResource("/sim_lift3.csv.gz").getPath
lazy val sim_jac3: String = getClass.getResource("/sim_jac3.csv.gz").getPath
lazy val user_aff: String = getClass.getResource("/user_aff.csv.gz").getPath
lazy val userpred_count3: String = getClass.getResource("/userpred_count3_userid_only.csv.gz").getPath
lazy val userpred_lift3: String = getClass.getResource("/userpred_lift3_userid_only.csv.gz").getPath
lazy val userpred_jac3: String = getClass.getResource("/userpred_jac3_userid_only.csv.gz").getPath

private lazy val tlcSampleData: DataFrame = session.read
.option("header", "true") //reading the headers

0 comments on commit fac542e

Please sign in to comment.
You can’t perform that action at this time.