
improve flakiness

mhamilton723 committed Jul 11, 2019
1 parent aa3c98f commit 41da2b7af2bace4ce0715b50a1db050cd67207e3
@@ -144,13 +144,12 @@ jobs:
PACKAGE: "image"
io:
PACKAGE: "io"
flaky: True
flaky:
PACKAGE: "flaky" #TODO fix flaky test so isolation is not needed
lightgbm1:
PACKAGE: "lightgbm.split1" #TODO fix LGBM Tests so no splitting is needed
flaky: True
PACKAGE: "lightgbm.split1" #TODO speed up LGBM Tests and remove split
lightgbm2:
PACKAGE: "lightgbm.split2"
flaky: True
lime:
PACKAGE: "lime"
opencv:
@@ -170,15 +169,6 @@ jobs:
inlineScript: 'pip install requests && sbt getDatasets'
- task: AzureCLI@1
displayName: 'Unit Test'
continueOnError: True
timeoutInMinutes: 20
inputs:
azureSubscription: 'Findable Incubation(ca9d21ff-2a46-4e8b-bf06-8d65242342e5)'
scriptLocation: inlineScript
inlineScript: 'sbt coverage "testOnly com.microsoft.ml.spark.$(PACKAGE).**"'
- task: AzureCLI@1
condition: and(eq(variables.Agent.JobStatus, 'SucceededWithIssues'), eq(variables.flaky, 'True'))
displayName: 'Flaky Unit Test Retry'
timeoutInMinutes: 20
inputs:
azureSubscription: 'Findable Incubation(ca9d21ff-2a46-4e8b-bf06-8d65242342e5)'
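These pipeline hunks drop the retry machinery from the build itself: the flaky: True matrix flags, the continueOnError escape hatch on the unit-test step, and the conditional "Flaky Unit Test Retry" step that re-ran a package when the first pass ended as SucceededWithIssues and the package was marked flaky. Retries now happen inside the tests through a tryWithRetries helper that the new traits below call but that is not shown in this diff. A minimal sketch of what such a helper might look like, assuming only what the call sites suggest (an array of sleep delays in milliseconds and a thunk to re-run):

import scala.concurrent.blocking

object RetryUtils {
  // Sketch only; the real helper lives elsewhere in the repo and may differ.
  // Assumed contract: run block(); on failure sleep the next delay and retry,
  // rethrowing once the delays are exhausted.
  def tryWithRetries[T](delaysMs: Array[Int] = Array(0, 100, 100))(block: () => T): T = {
    def attempt(remaining: List[Int]): T =
      try block()
      catch {
        case e: Exception if remaining.nonEmpty =>
          println(s"Attempt failed (${e.getMessage}); sleeping ${remaining.head} ms before retrying")
          blocking(Thread.sleep(remaining.head.toLong))
          attempt(remaining.tail)
      }
    attempt(delaysMs.toList)
  }
}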
@@ -3,7 +3,6 @@

package com.microsoft.ml.spark.core.test.base

import java.io.File
import java.nio.file.Files

import org.apache.commons.io.FileUtils
@@ -12,13 +11,15 @@ import org.apache.spark.ml._
import org.apache.spark.ml.linalg.DenseVector
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.{DataFrame, _}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.{Seconds=>SparkSeconds,StreamingContext}
import org.scalactic.source.Position
import org.scalactic.{Equality, TolerantNumerics}
import org.scalatest._
import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.{Seconds, Span}

import scala.concurrent._
import scala.reflect.ClassTag
import scala.concurrent.blocking

// Common test tags
object TestBase {
@@ -41,9 +42,29 @@ trait LinuxOnly extends TestBase {

trait Flaky extends TestBase {

val retyMillis: Array[Int] = Array(0,100,100)

override def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit pos: Position): Unit = {
super.test(testName, testTags: _*){
tryWithRetries(retyMillis)(testFun _)
}
}

}
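One detail worth noting in the override above: testFun is a by-name parameter, so testFun _ eta-expands it into a () => Any without evaluating it, which is what lets a retry helper run the test body more than once. A hypothetical, standalone illustration of the same mechanism (none of these names come from the commit):

object ByNameDemo extends App {
  // Eta-expanding a by-name parameter defers evaluation so it can be re-run.
  def runTwice(body: => Unit): Unit = {
    val thunk: () => Unit = body _ // nothing is evaluated yet
    thunk()                        // first evaluation of body
    thunk()                        // second, independent evaluation
  }

  runTwice(println("hello")) // prints "hello" twice
}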

trait TimeLimitedFlaky extends TestBase with TimeLimits {

val timeoutInSeconds: Int = 5*60

val retyMillis: Array[Int] = Array(0,100,100)

override def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit pos: Position): Unit = {
super.test(testName, testTags: _*){
tryWithRetries(Array(0,100,100))(testFun _)
tryWithRetries(retyMillis) {
failAfter(Span(timeoutInSeconds, Seconds)){
println("Executing time-limited flaky function")
testFun _}
}
}
}
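Because timeoutInSeconds and retyMillis are ordinary vals, a suite that mixes in TimeLimitedFlaky can tune them per suite. A hypothetical example (the suite name and the values are invented for illustration; only the trait names come from this commit):

class SlowExternalServiceSuite extends TestBase with TimeLimitedFlaky {
  // Hypothetical overrides; the defaults above are 5*60 seconds and Array(0, 100, 100).
  override val timeoutInSeconds: Int = 10 * 60
  override val retyMillis: Array[Int] = Array(0, 500, 2000)

  test("eventually succeeds against a slow dependency") {
    assert(1 + 1 == 2)
  }
}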

@@ -68,7 +89,7 @@ abstract class TestBase extends FunSuite with BeforeAndAfterEachTestData with Be
}

protected lazy val sc: SparkContext = session.sparkContext
protected lazy val ssc: StreamingContext = new StreamingContext(sc, Seconds(1))
protected lazy val ssc: StreamingContext = new StreamingContext(sc, SparkSeconds(1))

protected lazy val dir = SparkSessionFactory.workingDir
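The rename above exists because two different Seconds types are now in scope: Spark streaming's Seconds, used to build the StreamingContext batch interval, and ScalaTest's org.scalatest.time.Seconds, needed for failAfter's Span. Aliasing the Spark one at the import keeps both usable; a small self-contained illustration (helper names are hypothetical, only the import pattern mirrors the commit):

import org.apache.spark.SparkContext
import org.apache.spark.streaming.{Seconds => SparkSeconds, StreamingContext}
import org.scalatest.time.{Seconds, Span}

object SecondsClashDemo {
  def batchedContext(sc: SparkContext): StreamingContext =
    new StreamingContext(sc, SparkSeconds(1)) // Spark's Seconds, under its alias
  val fiveMinutes: Span = Span(300, Seconds)  // ScalaTest's Seconds, unqualified
}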

@@ -1,16 +1,18 @@
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.io.http
package com.microsoft.ml.spark.flaky

import com.microsoft.ml.spark.core.test.base.TimeLimitedFlaky
import com.microsoft.ml.spark.core.test.fuzzing.{TestObject, TransformerFuzzing}
import com.microsoft.ml.spark.io.http.PartitionConsolidator
import org.apache.spark.ml.util.MLReadable
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{DoubleType, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.scalatest.Assertion

class PartitionConsolidatorSuite extends TransformerFuzzing[PartitionConsolidator] {
class PartitionConsolidatorSuite extends TransformerFuzzing[PartitionConsolidator] with TimeLimitedFlaky {

import session.implicits._
