Skip to content

Commit

Permalink
[KYUUBI #2214] Add Spark Engine on Kubernetes integration test
Browse files Browse the repository at this point in the history
### _Why are the changes needed?_

Rename `kyuubi-deployment-kubernetes-it` to `kyuubi-kubernetes-it` which include:
- Kyuubi Server on Kubernetes it
- Kyuubi Spark Engine on Kubernetes it

Now, we use the package name to distinguish the scope of the integration tests:
- deployment package is for Kyuubi Server
- spark package is for Spark Engine

So it will be easy to add a Flink Engine on Kubernetes integration test in the future.

Note that this PR only covers Spark client mode in the Kubernetes integration test.

Since Spark only has two tags of Docker image https://hub.docker.com/r/apache/spark/tags, this PR only tests version 3.2.1

### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [ ] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before make a pull request

Closes #2214 from ulysses-you/spark-on-k8s-client.

Closes #2214

f6dda43 [ulysses-you] Add Spark Engine on Kubernetes integration test

Authored-by: ulysses-you <ulyssesyou18@gmail.com>
Signed-off-by: ulysses-you <ulyssesyou@apache.org>
  • Loading branch information
ulysses-you committed Mar 25, 2022
1 parent c09cd65 commit 4f0323d
Show file tree
Hide file tree
Showing 11 changed files with 142 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/labeler.yml
Expand Up @@ -84,7 +84,7 @@
- ".dockerignore"
- "bin/docker-image-tool.sh"
- "docker/**/*"
- "integration-tests/kyuubi-kubernetes-deployment-it/**/*"
- "integration-tests/kyuubi-kubernetes-it/**/*"
- "tools/spark-block-cleaner/**/*"

"module:metrics":
Expand Down
52 changes: 43 additions & 9 deletions .github/workflows/master.yml
Expand Up @@ -84,7 +84,7 @@ jobs:
- name: Code coverage
if: |
matrix.java == 8 &&
matrix.spark == '3.2' &&
matrix.spark == '3.2' &&
matrix.spark-archive == ''
uses: codecov/codecov-action@v2
with:
Expand Down Expand Up @@ -123,8 +123,8 @@ jobs:
-Dmaven.plugin.scalatest.exclude.tags=''
-Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds
minikube-it:
name: Minikube Integration Test
kyuubi-on-k8s-it:
name: Kyuubi Server On Kubernetes Integration Test
runs-on: ubuntu-20.04
steps:
- name: Checkout
Expand All @@ -145,27 +145,61 @@ jobs:
- name: Setup Minikube
uses: manusa/actions-setup-minikube@v2.4.3
with:
minikube version: 'v1.16.0'
kubernetes version: 'v1.19.2'
minikube version: 'v1.25.2'
kubernetes version: 'v1.23.3'
- name: kubectl pre-check
run: |
kubectl get serviceaccount
kubectl create serviceaccount kyuubi
kubectl get serviceaccount
- name: start kyuubi
run: kubectl apply -f integration-tests/kyuubi-kubernetes-deployment-it/test-k8s.yaml
run: kubectl apply -f integration-tests/kyuubi-kubernetes-it/src/test/resources/kyuubi-server.yaml
- name: kyuubi pod check
run: kubectl get pods
- name: integration tests
run: >-
./build/mvn clean install -Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true -Dspotless.check.skip -Dorg.slf4j.simpleLogger.defaultLogLevel=warn -V
-pl integration-tests/kyuubi-kubernetes-deployment-it -am
-Pkubernetes-deployment-it
-Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test
-pl integration-tests/kyuubi-kubernetes-it -am
-Pkubernetes-it
-Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test.deployment
- name: Upload test logs
if: failure()
uses: actions/upload-artifact@v2
with:
name: unit-tests-log
path: |
**/target/unit-tests.log
spark-on-k8s-it:
name: Spark Engine On Kubernetes Integration Test
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
# from https://github.com/marketplace/actions/setup-minikube-kubernetes-cluster
- name: Setup Minikube
uses: manusa/actions-setup-minikube@v2.4.3
with:
minikube version: 'v1.25.2'
kubernetes version: 'v1.23.3'
driver: docker
start args: '--extra-config=kubeadm.ignore-preflight-errors=NumCPU --force --cpus 2 --memory 4096'
- name: integration tests
run: >-
./build/mvn clean install
-Dmaven.javadoc.skip=true
-Drat.skip=true
-Dscalastyle.skip=true
-Dspotless.check.skip
-Dspark.version=3.2.1
-Pflink-provided,hive-provided
-Pkubernetes-it
-Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test.spark
- name: Upload test logs
if: failure()
uses: actions/upload-artifact@v2
with:
name: unit-tests-log
path: |
**/target/unit-tests.log
**/kyuubi-spark-sql-engine.log*
Expand Up @@ -27,8 +27,8 @@
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>kubernetes-deployment-integration-tests_2.12</artifactId>
<name>Kyuubi Test Kubernetes Deployment IT</name>
<artifactId>kubernetes-integration-tests_2.12</artifactId>
<name>Kyuubi Test Kubernetes IT</name>
<url>https://kyuubi.apache.org/</url>

<dependencies>
Expand All @@ -47,6 +47,21 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.kyuubi</groupId>
<artifactId>kyuubi-server_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.kyuubi</groupId>
<artifactId>kyuubi-server_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jul-to-slf4j</artifactId>
Expand Down
Expand Up @@ -15,13 +15,23 @@
* limitations under the License.
*/

package org.apache.kyuubi.kubernetes.test
package org.apache.kyuubi.kubernetes.test.deployment

import org.apache.kyuubi.Logging
import org.apache.kyuubi.kubernetes.test.MiniKube
import org.apache.kyuubi.operation.SparkQueryTests

// TODO: [KYUUBI-863] Support test Spark engine using k8s master with minikube
class KubernetesJDBCTestsSuite extends SparkQueryTests with Logging {
/**
* This test is for Kyuubi Server on Kubernetes with Spark engine:
*
* Real World Kubernetes Pod
* ------------ -----------------------------------------------------
* | | JDBC | |
* | Client | ----> | Kyuubi Server ----> Spark Engine (local mode) |
* | | | |
* ------------ -----------------------------------------------------
*/
class KyuubiOnKubernetesTestsSuite extends SparkQueryTests with Logging {
private lazy val _jdbcUrl: String = {
val kubernetesclient = MiniKube.getKubernetesClient
val kyuubiServers =
Expand Down
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.kyuubi.kubernetes.test.spark

import org.apache.kyuubi.{Logging, WithKyuubiServer}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.kubernetes.test.MiniKube
import org.apache.kyuubi.operation.SparkQueryTests

// TODO Support Spark Cluster mode
abstract class SparkOnKubernetesSuiteBase
  extends WithKyuubiServer with SparkQueryTests with Logging {
  // URL of the Kubernetes API server exposed by the local MiniKube cluster.
  private val apiServerAddress: String =
    MiniKube.getKubernetesClient.getMasterUrl.toString

  /**
   * Builds the base Kyuubi configuration that launches the Spark engine
   * against the MiniKube Kubernetes master using the official Apache Spark
   * 3.2.1 image, with a deliberately small resource footprint for CI.
   */
  protected def sparkOnK8sConf: KyuubiConf = {
    // TODO Support more Spark version
    // Spark official docker image: https://hub.docker.com/r/apache/spark/tags
    val engineSettings = Seq(
      "spark.master" -> s"k8s://$apiServerAddress",
      "spark.kubernetes.container.image" -> "apache/spark:v3.2.1",
      "spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
      "spark.executor.instances" -> "1",
      "spark.executor.cores" -> "1",
      "spark.executor.memory" -> "512M",
      "spark.driver.memory" -> "512M")
    engineSettings.foldLeft(KyuubiConf()) {
      case (conf, (key, value)) => conf.set(key, value)
    }
  }

  override protected def jdbcUrl: String = getJdbcUrl
}

/**
 * Exercises a Kyuubi Server whose Spark engine driver runs in client mode,
 * with only the executors scheduled onto Kubernetes:
 *
 *                  Real World                                  Kubernetes Pod
 * ------------------------------------------------------- ---------------------
 * |        JDBC                                          | |                   |
 * | Client ----> Kyuubi Server ----> Spark Driver        | ----> Spark Executors
 * |                                                      | |                   |
 * ------------------------------------------------------- ---------------------
 */
class SparkClientModeOnKubernetesSuite extends SparkOnKubernetesSuiteBase {
  override protected val conf: KyuubiConf = {
    // Start from the shared Kubernetes engine settings, then pin client mode.
    val baseConf = sparkOnK8sConf
    baseConf.set("spark.submit.deployMode", "client")
  }
}
4 changes: 2 additions & 2 deletions integration-tests/pom.xml
Expand Up @@ -37,9 +37,9 @@

<profiles>
<profile>
<id>kubernetes-deployment-it</id>
<id>kubernetes-it</id>
<modules>
<module>kyuubi-kubernetes-deployment-it</module>
<module>kyuubi-kubernetes-it</module>
</modules>
</profile>
</profiles>
Expand Down
Expand Up @@ -99,10 +99,11 @@ class SparkProcessBuilder(
.filter(Files.exists(_)).map(_.toAbsolutePath.toFile.getCanonicalPath)
}.orElse {
// 3. get the main resource from dev environment
Option(Paths.get("externals", module, "target", jarName))
.filter(Files.exists(_)).orElse {
Some(Paths.get("..", "externals", module, "target", jarName))
}.map(_.toAbsolutePath.toFile.getCanonicalPath)
val cwd = getClass.getProtectionDomain.getCodeSource.getLocation.getPath
.split("kyuubi-server")
assert(cwd.length > 1)
Option(Paths.get(cwd.head, "externals", module, "target", jarName))
.map(_.toAbsolutePath.toFile.getCanonicalPath)
}
}

Expand Down

0 comments on commit 4f0323d

Please sign in to comment.