[SPARK-24825][K8S][TEST] Kubernetes integration tests build the whole reactor.

To avoid running all of the non-Kubernetes integration tests in the build, support specifying tags to include and tag all Kubernetes integration tests with "k8s". The k8s tag is supplied in the dev/dev-run-integration-tests.sh script.
mccheah committed Jul 17, 2018
1 parent 2a4dd6f commit 6a89c65
Showing 3 changed files with 22 additions and 26 deletions.
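
At a high level, the change threads a new Maven property, test.include.tags, into both the surefire plugin (groups) and the scalatest-maven-plugin (tagsToInclude), mirroring the existing test.exclude.tags property, and tags every Kubernetes integration test with "k8s". A minimal sketch of the resulting invocation, assuming the module path and profiles that the dev script below ends up using:

  # Run only tests tagged "k8s" in the Kubernetes integration-tests module.
  # Leaving -Dtest.include.tags unset keeps the property empty, so no include filter is applied.
  build/mvn integration-test \
    -pl resource-managers/kubernetes/integration-tests -am \
    -Pkubernetes -Phadoop-2.7 \
    -Dtest.include.tags=k8s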
pom.xml (3 additions, 0 deletions)
@@ -194,6 +194,7 @@
 
 <test.java.home>${java.home}</test.java.home>
 <test.exclude.tags></test.exclude.tags>
+<test.include.tags></test.include.tags>
 
 <!-- Package to use when relocating shaded classes. -->
 <spark.shade.packageName>org.spark_project</spark.shade.packageName>
@@ -2162,6 +2163,7 @@
 </systemProperties>
 <failIfNoTests>false</failIfNoTests>
 <excludedGroups>${test.exclude.tags}</excludedGroups>
+<groups>${test.include.tags}</groups>
 </configuration>
 <executions>
 <execution>
@@ -2209,6 +2211,7 @@
 <test.src.tables>__not_used__</test.src.tables>
 </systemProperties>
 <tagsToExclude>${test.exclude.tags}</tagsToExclude>
+<tagsToInclude>${test.include.tags}</tagsToInclude>
 </configuration>
 <executions>
 <execution>
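
Both tag properties default to empty strings in the pom, so regular builds are unaffected until a tag list is passed on the command line. As an illustrative combination of the two filters (the excluded tag name here is hypothetical, not from this commit):

  # Select tests tagged "k8s", then drop any of them that also carry a hypothetical "slow" tag;
  # for both plugins the exclude list is applied on top of the include list.
  build/mvn integration-test -pl resource-managers/kubernetes/integration-tests -am \
    -Pkubernetes -Dtest.include.tags=k8s -Dtest.exclude.tags=slow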
dev/dev-run-integration-tests.sh (6 additions, 14 deletions)
@@ -16,9 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-TEST_ROOT_DIR=$(git rev-parse --show-toplevel)/resource-managers/kubernetes/integration-tests
-
-cd "${TEST_ROOT_DIR}"
+TEST_ROOT_DIR=$(git rev-parse --show-toplevel)
 
 DEPLOY_MODE="minikube"
 IMAGE_REPO="docker.io/kubespark"
@@ -27,7 +25,7 @@ IMAGE_TAG="N/A"
 SPARK_MASTER=
 NAMESPACE=
 SERVICE_ACCOUNT=
-INCLUDE_TAGS=
+INCLUDE_TAGS="k8s"
 EXCLUDE_TAGS=
 
 # Parse arguments
@@ -62,7 +60,7 @@ while (( "$#" )); do
 shift
 ;;
 --include-tags)
-INCLUDE_TAGS="$2"
+INCLUDE_TAGS="k8s,$2"
 shift
 ;;
 --exclude-tags)
@@ -76,13 +74,12 @@ while (( "$#" )); do
 shift
 done
 
-cd $TEST_ROOT_DIR
-
 properties=(
 -Dspark.kubernetes.test.sparkTgz=$SPARK_TGZ \
 -Dspark.kubernetes.test.imageTag=$IMAGE_TAG \
 -Dspark.kubernetes.test.imageRepo=$IMAGE_REPO \
--Dspark.kubernetes.test.deployMode=$DEPLOY_MODE
+-Dspark.kubernetes.test.deployMode=$DEPLOY_MODE \
+-Dtest.include.tags=$INCLUDE_TAGS
 )
 
 if [ -n $NAMESPACE ];
@@ -105,9 +102,4 @@ then
 properties=( ${properties[@]} -Dtest.exclude.tags=$EXCLUDE_TAGS )
 fi
 
-if [ -n $INCLUDE_TAGS ];
-then
-properties=( ${properties[@]} -Dtest.include.tags=$INCLUDE_TAGS )
-fi
-
-../../../build/mvn integration-test ${properties[@]}
+$TEST_ROOT_DIR/build/mvn integration-test -f $TEST_ROOT_DIR/pom.xml -pl resource-managers/kubernetes/integration-tests -am -Pkubernetes -Phadoop-2.7 ${properties[@]}
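
Because INCLUDE_TAGS is now seeded with "k8s" and always forwarded as -Dtest.include.tags, the trailing conditional is no longer needed, and user-supplied tags are appended after "k8s" rather than replacing it. A hypothetical invocation (the extra tag names here are illustrative only):

  # "k8s" is always in the include list; tests carrying either "k8s" or "r" are selected,
  # and anything tagged "flaky" is filtered out via -Dtest.exclude.tags.
  dev/dev-run-integration-tests.sh --include-tags r --exclude-tags flaky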
KubernetesSuite.scala (13 additions, 12 deletions)
@@ -23,7 +23,7 @@ import java.util.regex.Pattern
 
 import com.google.common.io.PatternFilenameFilter
 import io.fabric8.kubernetes.api.model.Pod
-import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Tag}
 import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
 import scala.collection.JavaConverters._
@@ -47,6 +47,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 private var containerLocalSparkDistroExamplesJar: String = _
 private var appLocator: String = _
 private var driverPodName: String = _
+private val k8sTestTag = Tag("k8s")
 
 override def beforeAll(): Unit = {
 // The scalatest-maven-plugin gives system properties that are referenced but not set null
@@ -102,22 +103,22 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 deleteDriverPod()
 }
 
-test("Run SparkPi with no resources") {
+test("Run SparkPi with no resources", k8sTestTag) {
 runSparkPiAndVerifyCompletion()
 }
 
-test("Run SparkPi with a very long application name.") {
+test("Run SparkPi with a very long application name.", k8sTestTag) {
 sparkAppConf.set("spark.app.name", "long" * 40)
 runSparkPiAndVerifyCompletion()
 }
 
-test("Use SparkLauncher.NO_RESOURCE") {
+test("Use SparkLauncher.NO_RESOURCE", k8sTestTag) {
 sparkAppConf.setJars(Seq(containerLocalSparkDistroExamplesJar))
 runSparkPiAndVerifyCompletion(
 appResource = SparkLauncher.NO_RESOURCE)
 }
 
-test("Run SparkPi with a master URL without a scheme.") {
+test("Run SparkPi with a master URL without a scheme.", k8sTestTag) {
 val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
 val k8sMasterUrl = if (url.getPort < 0) {
 s"k8s://${url.getHost}"
@@ -128,11 +129,11 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 runSparkPiAndVerifyCompletion()
 }
 
-test("Run SparkPi with an argument.") {
+test("Run SparkPi with an argument.", k8sTestTag) {
 runSparkPiAndVerifyCompletion(appArgs = Array("5"))
 }
 
-test("Run SparkPi with custom labels, annotations, and environment variables.") {
+test("Run SparkPi with custom labels, annotations, and environment variables.", k8sTestTag) {
 sparkAppConf
 .set("spark.kubernetes.driver.label.label1", "label1-value")
 .set("spark.kubernetes.driver.label.label2", "label2-value")
@@ -158,21 +159,21 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 })
 }
 
-test("Run extraJVMOptions check on driver") {
+test("Run extraJVMOptions check on driver", k8sTestTag) {
 sparkAppConf
 .set("spark.driver.extraJavaOptions", "-Dspark.test.foo=spark.test.bar")
 runSparkJVMCheckAndVerifyCompletion(
 expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)"))
 }
 
-test("Run SparkRemoteFileTest using a remote data file") {
+test("Run SparkRemoteFileTest using a remote data file", k8sTestTag) {
 sparkAppConf
 .set("spark.files", REMOTE_PAGE_RANK_DATA_FILE)
 runSparkRemoteCheckAndVerifyCompletion(
 appArgs = Array(REMOTE_PAGE_RANK_FILE_NAME))
 }
 
-test("Run PySpark on simple pi.py example") {
+test("Run PySpark on simple pi.py example", k8sTestTag) {
 sparkAppConf
 .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
 runSparkApplicationAndVerifyCompletion(
@@ -186,7 +187,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 isJVM = false)
 }
 
-test("Run PySpark with Python2 to test a pyfiles example") {
+test("Run PySpark with Python2 to test a pyfiles example", k8sTestTag) {
 sparkAppConf
 .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
 .set("spark.kubernetes.pyspark.pythonversion", "2")
@@ -204,7 +205,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
 pyFiles = Some(PYSPARK_CONTAINER_TESTS))
 }
 
-test("Run PySpark with Python3 to test a pyfiles example") {
+test("Run PySpark with Python3 to test a pyfiles example", k8sTestTag) {
 sparkAppConf
 .set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-py:${getTestImageTag}")
 .set("spark.kubernetes.pyspark.pythonversion", "3")