[TEST] [DELTA] Refine CI and add tests for Delta 1.0.0 with Spark 3.1 #643
GitHub Actions workflow:
```
@@ -34,11 +34,9 @@ jobs:
    matrix:
      profiles:
        - ''
        - '-Pspark-3.0 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest'
        - '-Pspark-3.0 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest'
        - '-Pspark-3.1 -Pkyuubi-extension-spark_3.1'
        - '-Pspark-3.1 -Pkyuubi-extension-spark_3.1 -Pspark-hadoop-3.2'
        - '-Pspark-3.2-snapshot -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest'
        - '-DwildcardSuites=org.apache.kyuubi.operation.tpcds.TPCDSOutputSchemaSuite,org.apache.kyuubi.operation.tpcds.TPCDSDDLSuite -Dmaven.plugin.scalatest.exclude.tags=""'
        - '-Pspark-master -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper'
    env:
      SPARK_LOCAL_IP: localhost
    steps:
```
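Each matrix entry is spliced verbatim into the Maven command of the build step (next hunk). As a sketch assembled purely from the flags above and the step below, the new Spark 3.1-binary entry expands to:

```sh
# What "Build with Maven" runs once ${{ matrix.profiles }} is substituted
# with the -Pspark-3.0 / Spark 3.1 binary matrix entry.
mvn clean install --no-transfer-progress \
  -Pspark-3.0 \
  -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 \
  -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz \
  -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest \
  -Dmaven.javadoc.skip=true -V
```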
```
@@ -72,16 +70,12 @@ jobs:
          restore-keys: |
            ${{ runner.os }}-maven-io-
      - name: Build with Maven
        if: ${{ ! contains(matrix.profiles, 'wildcardSuites') }}
        run: mvn clean install ${{ matrix.profiles }} -Dmaven.javadoc.skip=true -V
      - name: Wild Card Suites Tests
        if: ${{ contains(matrix.profiles, 'wildcardSuites') }}
        run: mvn clean install -DskipTests -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper && mvn -Dtest=none ${{ matrix.profiles }} test
        run: mvn clean install --no-transfer-progress ${{ matrix.profiles }} -Dmaven.javadoc.skip=true -V
      - name: Code coverage
        if: ${{ matrix.profiles == '' }}
        run: bash <(curl -s https://codecov.io/bash)
      - name: Detected Dependency List Change
        if: ${{ ! contains(matrix.profiles, 'spark-3.2-snapshot') && ! contains(matrix.profiles, 'wildcardSuites') }}
        if: ${{ ! contains(matrix.profiles, 'spark-master') }}
        run: build/dependency.sh
      - name: Upload test logs
        if: failure()

@@ -91,3 +85,43 @@ jobs:
          path: |
            **/target/unit-tests.log
            **/kyuubi-spark-sql-engine.log*

  tpcds:
    name: TPC-DS Schema Tests
    runs-on: ubuntu-latest
    env:
      SPARK_LOCAL_IP: localhost
    steps:
      - uses: actions/checkout@v2
      - name: Setup JDK 1.8
        uses: actions/setup-java@v1
        with:
          java-version: '1.8'
      - uses: actions/cache@v1
        with:
          path: ~/.m2/repository/com
          key: ${{ runner.os }}-maven-com-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-com-
      - uses: actions/cache@v1
        with:
          path: ~/.m2/repository/org
          key: ${{ runner.os }}-maven-org-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-org-
      - uses: actions/cache@v1
        with:
          path: ~/.m2/repository/net
          key: ${{ runner.os }}-maven-net-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-net-
      - uses: actions/cache@v1
        with:
          path: ~/.m2/repository/io
          key: ${{ runner.os }}-maven-io-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-io-
      - name: Run TPC-DS Schema Tests
        run: |
          mvn clean install --no-transfer-progress -Pspark-3.0 -DskipTests -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper
          mvn test --no-transfer-progress -Pspark-3.0 -Dtest=none -DwildcardSuites=*TPCDSSuite
```

Review comment (on the step name "Run TPC-DS Schema Tests"): ditto — the same naming question as the comment on the job name at the end of this review.
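For running these suites outside CI, a hedged sketch (suite name taken from the removed matrix entry above; `-Dtest=none` is the usual trick for keeping Surefire idle while the scalatest-maven-plugin's `wildcardSuites` selects ScalaTest classes by pattern):

```sh
# Build the engine modules once without tests, then run a single
# TPC-DS suite. -Dtest=none stops Surefire from matching any tests;
# -DwildcardSuites hands suite selection to the scalatest-maven-plugin.
mvn clean install --no-transfer-progress -Pspark-3.0 -DskipTests \
  -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper
mvn test --no-transfer-progress -Pspark-3.0 -Dtest=none \
  -DwildcardSuites=org.apache.kyuubi.operation.tpcds.TPCDSOutputSchemaSuite
```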
Travis CI configuration:
```
@@ -31,36 +31,45 @@ matrix:
    - name: Test Kyuubi w/ Default Profile
      env:
        - PROFILE=""
        - EXCLUDE_TAGS="org.apache.kyuubi.tags.ExtendedSQLTest"
        - JOB="build"
        - CODECOV="true"
    - name: Test Kyuubi w/ -Pspark-3.0 and Spark 3.1 binary
      env:
        - PROFILE="-Pspark-3.0 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz"
        - EXCLUDE_TAGS="org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest"
        - PROFILE="-Pspark-3.0 -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.1 -Dspark.archive.name=spark-3.1.1-bin-hadoop2.7.tgz -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest"
        - JOB="build"
    - name: Test Kyuubi w/ -Pspark-3.1 -Pkyuubi-extension-spark_3.1
      env:
        - PROFILE="-Pspark-3.1 -Pkyuubi-extension-spark_3.1"
        - EXCLUDE_TAGS="org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest"
    - name: Test Kyuubi w/ -Pspark-3.1 -Pspark-hadoop-3.2 -Pkyuubi-extension-spark_3.1
        - JOB="build"
    - name: Test Kyuubi w/ -Pspark-master w/ Spark 3.2 nightly build
      env:
        - PROFILE="-Pspark-3.1 -Dspark-hadoop-3.2 -Pkyuubi-extension-spark_3.1"
        - EXCLUDE_TAGS="org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest"
    - name: Test Kyuubi w/ -Pspark-3.2-snapshot w/ Spark 3.2 nightly build
      env:
        - PROFILE="-Pspark-3.2-snapshot -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper"
        - EXCLUDE_TAGS="org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest"
        - PROFILE="-Pspark-master"
        - JOB="nightly-build"
    - name: Test Kyuubi w/ -Pspark-3.0 TPCDS Tests
      env:
        - TPCDS_TESTS=true
        - PROFILE="-Pspark-3.0"
        - JOB="tpcds-tests"

install:
  - mvn --version

script:
  - if [[ "$TPCDS_TESTS" == "true" ]]; then mvn clean install -DskipTests -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper && mvn -Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds.TPCDSOutputSchemaSuite,org.apache.kyuubi.operation.tpcds.TPCDSDDLSuite -Dmaven.plugin.scalatest.exclude.tags="" test; fi
  - if [[ "$TPCDS_TESTS" != "true" ]]; then mvn clean install $PROFILE -Dmaven.plugin.scalatest.exclude.tags=$EXCLUDE_TAGS -Dmaven.javadoc.skip=true -V ;fi
  - |
    if [[ "$JOB" == "build" ]]; then
      mvn clean install --no-transfer-progress $PROFILE -Dmaven.javadoc.skip=true -V
    fi
  - |
    if [[ "$JOB" == "nightly-build" ]]; then
      mvn clean install --no-transfer-progress $PROFILE -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper -Dmaven.javadoc.skip=true -V
    fi
  - |
    if [[ "$JOB" == "tpcds-tests" ]]; then
      mvn clean install --no-transfer-progress $PROFILE -DskipTests -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper
      mvn test --no-transfer-progress $PROFILE -Dtest=none -DwildcardSuites=*TPCDSSuite
    fi

after_success:
  - if [[ -z $EXCLUDE_TAGS ]]; then bash <(curl -s https://codecov.io/bash); fi
  - if [[ "$CODECOV" == "true" ]]; then bash <(curl -s https://codecov.io/bash); fi

after_failure:
  - echo "==== Build with $PROFILE FAILED ===="
```

Review comment (on "Test Kyuubi w/ -Pspark-master w/ Spark 3.2 nightly build"): remove.
Test tag annotation (DataLakeTest renamed to DeltaTest):
```
@@ -27,4 +27,4 @@
@TagAnnotation
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface DataLakeTest {}
public @interface DeltaTest {}
```
Review discussion:
- Reviewer: Not a big issue, but shall we add a parent tag for …?
- Author: I think we use tags here to mark tests that do not support specific Spark versions, and due to limitations of the scalatest maven plugin (e.g. no collection operations), the workflow scripts are not elegant right now. Would you explain the benefit of adding a parent tag?
- Reviewer: Not a strong opinion. I'm fine with both.
New test tag annotation (IcebergTest):

```
@@ -0,0 +1,30 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kyuubi.tags;

import org.scalatest.TagAnnotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@TagAnnotation
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface IcebergTest {}
```
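These tag annotations are what the `maven.plugin.scalatest.exclude.tags` property filters on. A hedged sketch of skipping Delta- and Iceberg-tagged suites when testing against a Spark build they do not yet support (property and tag names are taken from the diffs; the profile choice is illustrative, and the property is assumed to feed the scalatest-maven-plugin's tagsToExclude setting, consistent with the pom changes below):

```sh
# Skip every suite annotated with @DeltaTest or @IcebergTest.
mvn test -Pspark-master \
  -Dmaven.plugin.scalatest.exclude.tags=org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest
```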
Maven pom.xml:
```
@@ -1104,6 +1104,20 @@
        <groupId>io.delta</groupId>
        <artifactId>delta-core_${scala.binary.version}</artifactId>
        <version>${delta.version}</version>
        <exclusions>
          <!--
            Apache Spark already provides the Antlr and Scala dependencies, and there is
            a potential Antlr version compatibility issue in Delta 1.0.0:
            https://github.com/delta-io/delta/pull/676
          -->
          <exclusion>
            <groupId>org.antlr</groupId>
            <artifactId>*</artifactId>
          </exclusion>
          <exclusion>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
          </exclusion>
        </exclusions>
      </dependency>
    </dependencies>
  </dependencyManagement>
```

Review discussion:
- Reviewer: It's better to add some comments at this code place or add a …
- Author: Updated.
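A quick, hedged way to confirm the exclusions take effect (standard maven-dependency-plugin usage; the profile choice is illustrative):

```sh
# Print the dependency tree filtered to Antlr artifacts; after the
# exclusions above, delta-core should no longer appear as a source.
mvn dependency:tree -Pspark-3.1 -Dincludes=org.antlr
# Likewise for scala-library (it still arrives via Spark and Scala
# themselves, just not via delta-core).
mvn dependency:tree -Pspark-3.1 -Dincludes=org.scala-lang:scala-library
```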
```
@@ -1550,20 +1564,27 @@

    <profile>
      <id>spark-3.0</id>
      <properties>
        <spark.version>3.0.2</spark.version>
        <delta.version>0.8.0</delta.version>
        <maven.plugin.scalatest.exclude.tags></maven.plugin.scalatest.exclude.tags>
      </properties>
    </profile>

    <profile>
      <id>spark-3.2-snapshot</id>
      <id>spark-3.1</id>
      <properties>
        <spark.version>3.2.0-SNAPSHOT</spark.version>
        <spark.version>3.1.1</spark.version>
        <delta.version>1.0.0</delta.version>
        <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.IcebergTest</maven.plugin.scalatest.exclude.tags>
      </properties>
    </profile>

    <profile>
      <id>spark-3.1</id>
      <id>spark-master</id>
      <properties>
        <spark.version>3.1.1</spark.version>
        <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DataLakeTest</maven.plugin.scalatest.exclude.tags>
        <spark.version>3.2.0-SNAPSHOT</spark.version>
        <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest</maven.plugin.scalatest.exclude.tags>
      </properties>
    </profile>
```
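Each profile pins a Spark version, a Delta version where applicable, and a default tag-exclusion list. A hedged sketch of selecting them locally, with flags taken from the CI changes above:

```sh
# Spark 3.1 / Delta 1.0.0; the spark-3.1 profile's defaults already
# exclude ExtendedSQLTest and IcebergTest.
mvn clean install --no-transfer-progress -Pspark-3.1 -Pkyuubi-extension-spark_3.1 \
  -Dmaven.javadoc.skip=true -V

# Spark master snapshot: engine modules only; Delta and Iceberg suites are
# excluded by the profile's defaults.
mvn clean install --no-transfer-progress -Pspark-master \
  -pl :kyuubi-spark-sql-engine,:kyuubi-common,:kyuubi-ha,:kyuubi-zookeeper \
  -Dmaven.javadoc.skip=true -V
```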
Review comment (on the job name "TPC-DS Schema Tests"): not only schema? Simply "TPC-DS Tests"?