 109 |  109 |   <hadoop.version>3.3.1</hadoop.version>
 110 |  110 |   <hadoop.binary.version>3.2</hadoop.binary.version>
 111 |  111 |   <hive.version>2.3.9</hive.version>
 112 |      | -   <hudi.version>0.9.0</hudi.version>
     |  112 | +   <hudi.version>0.10.0</hudi.version>
 113 |  113 |   <iceberg.name>iceberg-spark3-runtime</iceberg.name>
 114 |  114 |   <iceberg.version>0.12.1</iceberg.version>
 115 |  115 |   <jackson.version>2.12.5</jackson.version>

 777 |  777 |   </dependency>
 778 |  778 |
 779 |  779 |   <!-- Hudi dependency -->
     |  780 | +   <!--
     |  781 | +     We don't use hadoop-common directly; it is declared only to suppress this exception:
     |  782 | +     Failed to execute goal org.apache.maven.plugins:maven-shade-plugin:3.2.4:shade (default) on project
     |  783 | +     kyuubi-spark-sql-engine_2.12: Error creating shaded jar: Could not resolve following dependencies:
     |  784 | +     [jdk.tools:jdk.tools:jar:1.6 (system)]
     |  785 | +
     |  786 | +     The issue only occurs in the GitHub Actions environment with Hudi 0.10.0 and JDK 11.
     |  787 | +     After a few days of digging, the only place found to introduce jdk.tools is this chain:
     |  788 | +
     |  789 | +     - org.apache.hudi:hudi-common:jar:0.10.0:test
     |  790 | +       - org.apache.hbase:hbase-server:jar:1.2.3:test
     |  791 | +         - org.apache.hadoop:hadoop-common:jar:2.5.1:test
     |  792 | +           - org.apache.hadoop:hadoop-annotations:jar:2.5.1:test
     |  793 | +             - jdk.tools:jdk.tools:jar:1.6:system
     |  794 | +   -->
     |  795 | +   <dependency>
     |  796 | +     <groupId>org.apache.hadoop</groupId>
     |  797 | +     <artifactId>hadoop-common</artifactId>
     |  798 | +     <version>${hadoop.version}</version>
     |  799 | +   </dependency>
     |  800 | +
 780 |  801 |   <dependency>
 781 |  802 |     <groupId>org.apache.parquet</groupId>
 782 |  803 |     <artifactId>parquet-avro</artifactId>

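Note (not part of the diff): a transitive chain like the one in the comment above can be confirmed with the Maven dependency plugin, run from the repository root or the affected module. A minimal sketch; the filter pattern targets the system-scoped artifact named in the shade error:

    mvn dependency:tree -Dincludes=jdk.tools:jdk.tools

If the chain resolves through the old hadoop-common 2.5.1, managing hadoop-common to ${hadoop.version} as above replaces that version in the tree, which is presumably why the shade error disappears.
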
 794 |  815 |     <artifactId>hudi-spark-common_${scala.binary.version}</artifactId>
 795 |  816 |     <version>${hudi.version}</version>
 796 |  817 |     <exclusions>
     |  818 | +       <exclusion>
     |  819 | +         <groupId>org.scala-lang</groupId>
     |  820 | +         <artifactId>scala-library</artifactId>
     |  821 | +       </exclusion>
 797 |  822 |       <exclusion>
 798 |  823 |         <groupId>org.apache.hudi</groupId>
 799 |  824 |         <artifactId>hudi-timeline-service</artifactId>

 826 |  851 |         <groupId>org.apache.orc</groupId>
 827 |  852 |         <artifactId>*</artifactId>
 828 |  853 |       </exclusion>
     |  854 | +       <exclusion>
     |  855 | +         <groupId>org.apache.hudi</groupId>
     |  856 | +         <artifactId>hudi-aws</artifactId>
     |  857 | +       </exclusion>
 829 |  858 |     </exclusions>
 830 |  859 |   </dependency>
 831 |  860 |

 834 |  863 |     <artifactId>hudi-spark_${scala.binary.version}</artifactId>
 835 |  864 |     <version>${hudi.version}</version>
 836 |  865 |     <exclusions>
     |  866 | +       <exclusion>
     |  867 | +         <groupId>org.scala-lang</groupId>
     |  868 | +         <artifactId>scala-library</artifactId>
     |  869 | +       </exclusion>
 837 |  870 |       <exclusion>
 838 |  871 |         <groupId>org.apache.hudi</groupId>
 839 |  872 |         <artifactId>hudi-spark-common_2.11</artifactId>

1677 | 1710 |     <properties>
1678 | 1711 |       <spark.version>3.0.3</spark.version>
1679 | 1712 |       <delta.version>0.8.0</delta.version>
1680 |      | -       <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest</maven.plugin.scalatest.exclude.tags>
     | 1713 | +       <!-- Hudi 0.10.0 still supports Spark 3.0, but it has to be built by the user with a specific profile -->
     | 1714 | +       <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
1681 | 1715 |     </properties>
1682 | 1716 |   </profile>
1683 | 1717 |

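Note (not part of the diff): the maven.plugin.scalatest.exclude.tags property only takes effect because it feeds the ScalaTest Maven plugin's tag filter elsewhere in the build. A minimal sketch of that wiring, assuming a configuration roughly like the one below; the actual plugin block is not shown in this diff:

    <plugin>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest-maven-plugin</artifactId>
      <configuration>
        <!-- Suites carrying an excluded tag, e.g. org.apache.kyuubi.tags.HudiTest, are skipped -->
        <tagsToExclude>${maven.plugin.scalatest.exclude.tags}</tagsToExclude>
      </configuration>
    </plugin>

With this change, the Hudi suites are skipped under the Spark 3.0 profile above and enabled under the Spark 3.1 profile below.
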
1686 | 1720 |     <properties>
1687 | 1721 |       <spark.version>3.1.2</spark.version>
1688 | 1722 |       <delta.version>1.0.0</delta.version>
1689 |      | -       <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
     | 1723 | +       <maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest</maven.plugin.scalatest.exclude.tags>
1690 | 1724 |     </properties>
1691 | 1725 |     <modules>
1692 | 1726 |       <module>dev/kyuubi-extension-spark-common</module>