
Commit 5b136d6

attempt fix for IT
1 parent 11a4a46 commit 5b136d6

3 files changed: +31 -2 lines changed


plugins/spark/v3.5/integration/build.gradle.kts

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@ dependencies {
   testRuntimeOnly("org.apache.logging.log4j:log4j-slf4j2-impl:2.24.3")

   testImplementation("io.delta:delta-spark_${scalaVersion}:3.3.1")
-  testImplementation("org.apache.hudi:hudi-spark-datasource:0.15.0")
+  testImplementation("org.apache.hudi:hudi-spark3.5-bundle_2.12:0.15.0")

   testImplementation(platform(libs.jackson.bom))
   testImplementation("com.fasterxml.jackson.core:jackson-annotations")

plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkHudiIT.java

Lines changed: 29 additions & 0 deletions
@@ -32,6 +32,7 @@
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.RowFactory;
+import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.types.DataTypes;
 import org.apache.spark.sql.types.Metadata;
 import org.apache.spark.sql.types.StructField;
@@ -43,6 +44,34 @@

 @QuarkusIntegrationTest
 public class SparkHudiIT extends SparkIntegrationBase {
+
+  @Override
+  protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) {
+    return builder
+        .config(
+            "spark.sql.extensions",
+            "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
+        .config(
+            "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
+        .config(
+            String.format("spark.sql.catalog.%s", catalogName),
+            "org.apache.polaris.spark.SparkCatalog")
+        .config("spark.sql.warehouse.dir", warehouseDir.toString())
+        .config(String.format("spark.sql.catalog.%s.type", catalogName), "rest")
+        .config(
+            String.format("spark.sql.catalog.%s.uri", catalogName),
+            endpoints.catalogApiEndpoint().toString())
+        .config(String.format("spark.sql.catalog.%s.warehouse", catalogName), catalogName)
+        .config(String.format("spark.sql.catalog.%s.scope", catalogName), "PRINCIPAL_ROLE:ALL")
+        .config(
+            String.format("spark.sql.catalog.%s.header.realm", catalogName), endpoints.realmId())
+        .config(String.format("spark.sql.catalog.%s.token", catalogName), sparkToken)
+        .config(String.format("spark.sql.catalog.%s.s3.access-key-id", catalogName), "fakekey")
+        .config(
+            String.format("spark.sql.catalog.%s.s3.secret-access-key", catalogName), "fakesecret")
+        .config(String.format("spark.sql.catalog.%s.s3.region", catalogName), "us-west-2");
+  }
+
   private String defaultNs;
   private String tableRootDir;

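A hedged sketch of how this override might be exercised (none of it is in the commit: the catalog name "polaris", the local master, and the SQL identifiers and table path are illustrative assumptions). The builder returned above is what the test harness turns into the session, after which Hudi tables go through the Polaris catalog while spark_catalog stays on HoodieCatalog:

// Illustrative fragment, as it might appear inside a test method of SparkHudiIT.
// "polaris", "local[1]", ns1, hudi_tbl, and the file path are assumptions, not from the commit.
SparkSession spark =
    withCatalog(SparkSession.builder().master("local[1]").appName("hudi-it-sketch"), "polaris")
        .getOrCreate();
try {
  spark.sql("USE polaris");
  spark.sql("CREATE NAMESPACE IF NOT EXISTS ns1");
  spark.sql(
      "CREATE TABLE ns1.hudi_tbl (id INT, name STRING) USING HUDI "
          + "LOCATION 'file:///tmp/hudi_tbl'");
  spark.sql("INSERT INTO ns1.hudi_tbl VALUES (1, 'a'), (2, 'b')");
} finally {
  spark.stop();
}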
plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String
     return builder
         .config(
             "spark.sql.extensions",
-            "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension,org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
+            "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions,io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
         .config(

0 commit comments
