fix run embedded mode
FANNG1 committed May 13, 2024
1 parent 922dd3d · commit e58c1d1
Showing 9 changed files with 25 additions and 30 deletions.
@@ -48,11 +48,13 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Disabled
@Tag("gravitino-docker-it")
public class TrinoConnectorIT extends AbstractIT {
public static final Logger LOG = LoggerFactory.getLogger(TrinoConnectorIT.class);
@@ -32,11 +32,13 @@
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Disabled
@Tag("gravitino-docker-it")
public class TrinoQueryIT extends TrinoQueryITBase {
private static final Logger LOG = LoggerFactory.getLogger(TrinoQueryIT.class);
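For context, a class-level @Disabled tells JUnit 5 to skip every test in the annotated class, so the TrinoConnectorIT and TrinoQueryIT suites above stop running until the annotation is removed. A minimal sketch, with an illustrative class and reason that are not part of this commit:

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

// Class-level @Disabled skips every @Test method in the class.
@Disabled("illustrative reason")
class SkippedSuiteDemo {
  @Test
  void neverExecuted() {}
}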
@@ -23,6 +23,7 @@ private static class ObjectMapperHolder {
.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
.configure(EnumFeature.WRITE_ENUMS_TO_LOWERCASE, true)
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS)
.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
.build()
.setSerializationInclusion(JsonInclude.Include.NON_NULL)
.registerModule(new JavaTimeModule());
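For context, Jackson enables SerializationFeature.FAIL_ON_EMPTY_BEANS by default, so serializing an object that exposes no properties throws InvalidDefinitionException; disabling the feature makes such objects serialize as {} instead. A minimal sketch under that assumption; the demo classes are illustrative, not from this repository:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

public class EmptyBeanDemo {
  // A class with no visible properties: Jackson treats it as an "empty bean".
  static class Empty {}

  public static void main(String[] args) throws Exception {
    // With default settings this line would throw InvalidDefinitionException:
    // new ObjectMapper().writeValueAsString(new Empty());

    ObjectMapper lenient =
        new ObjectMapper().disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);
    System.out.println(lenient.writeValueAsString(new Empty())); // prints {}
  }
}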
spark-connector/spark-connector/build.gradle.kts (25 changes: 10 additions & 15 deletions)
@@ -27,14 +27,8 @@ dependencies {
implementation("org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_$scalaVersion:$icebergVersion")
implementation("org.apache.kyuubi:kyuubi-spark-connector-hive_$scalaVersion:$kyuubiVersion")

compileOnly("org.apache.spark:spark-catalyst_$scalaVersion:$sparkVersion") {
exclude("org.apache.hadoop", "hadoop-client-api")
exclude("org.apache.hadoop", "hadoop-client-runtime")
}
compileOnly("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion") {
exclude("org.apache.hadoop", "hadoop-client-api")
exclude("org.apache.hadoop", "hadoop-client-runtime")
}
compileOnly("org.apache.spark:spark-catalyst_$scalaVersion:$sparkVersion")
compileOnly("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion")
compileOnly("org.scala-lang.modules:scala-java8-compat_$scalaVersion:$scalaJava8CompatVersion")
annotationProcessor(libs.lombok)
compileOnly(libs.lombok)
@@ -47,6 +41,7 @@ dependencies {

testImplementation(libs.hive2.common) {
exclude("org.apache.curator")
// use hadoop from Spark
exclude("org.apache.hadoop")
exclude("org.eclipse.jetty.aggregate", "jetty-all")
exclude("org.eclipse.jetty.orbit", "javax.servlet")
@@ -71,19 +66,19 @@
exclude("org.slf4j")
}
testImplementation(libs.junit.jupiter.api)
testRuntimeOnly(libs.junit.jupiter.engine)
testImplementation(libs.junit.jupiter.params)
testImplementation(libs.mysql.driver)
testImplementation(libs.testcontainers)

testImplementation("org.apache.spark:spark-catalyst_$scalaVersion:$sparkVersion") {
exclude("org.apache.curator")
}
// include hive common
// include spark-sql,spark-catalyst,hive-common,hdfs-client
testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion") {
exclude("org.apache.hadoop")
// conflict with Gravitino server jersey
exclude("org.glassfish.jersey.core")
exclude("org.glassfish.jersey.containers")
exclude("org.glassfish.jersey.inject")
}
testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion")

testRuntimeOnly(libs.junit.jupiter.engine)
}

tasks.test {
@@ -17,7 +17,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.ws.rs.NotSupportedException;
import lombok.Getter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.tuple.Pair;
@@ -117,7 +116,7 @@ public Transform[] toGravitinoPartitionings(
getFieldNameFromGravitinoNamedReference(
(NamedReference) toGravitinoNamedReference(transform.references()[0])));
} else {
throw new NotSupportedException(
throw new UnsupportedOperationException(
"Doesn't support Spark transform: " + transform.name());
}
})
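This hunk and the ones below apply the same substitution: javax.ws.rs.NotSupportedException comes from the JAX-RS API jar, whereas java.lang.UnsupportedOperationException ships with the JDK, which presumably matters when the connector runs embedded on a plain Spark classpath without Jersey. A minimal sketch of the pattern; the helper method is hypothetical, not code from this commit:

public class TransformGuards {
  static void rejectTransform(String name) {
    // Before: threw javax.ws.rs.NotSupportedException, pulling in the JAX-RS API.
    // After: UnsupportedOperationException lives in java.lang, so no extra dependency.
    throw new UnsupportedOperationException("Doesn't support Spark transform: " + name);
  }
}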
@@ -146,7 +145,7 @@ public DistributionAndSortOrdersInfo toGravitinoDistributionAndSortOrders(
Distribution distribution = toGravitinoDistribution(bucketTransform);
distributionAndSortOrdersInfo.setDistribution(distribution);
} else {
throw new NotSupportedException(
throw new UnsupportedOperationException(
"Only support BucketTransform and SortedBucketTransform, but get: "
+ transform.name());
}
@@ -300,7 +299,7 @@ private static org.apache.spark.sql.connector.expressions.Transform toSparkBucke
}
// Spark doesn't support EVEN or RANGE distribution
default:
throw new NotSupportedException(
throw new UnsupportedOperationException(
"Doesn't support distribution strategy: " + distribution.strategy());
}
}
@@ -16,7 +16,6 @@
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.ws.rs.NotSupportedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.connector.catalog.TableCatalog;

@@ -106,7 +105,7 @@ public Map<String, String> toGravitinoTableProperties(Map<String, String> proper
gravitinoTableProperties.put(
HivePropertiesConstants.GRAVITINO_HIVE_FORMAT, gravitinoFormat);
} else {
throw new NotSupportedException("Doesn't support hive file format: " + fileFormat);
throw new UnsupportedOperationException("Doesn't support hive file format: " + fileFormat);
}
}

@@ -17,7 +17,6 @@
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import javax.ws.rs.NotSupportedException;
import org.apache.spark.sql.connector.expressions.BucketTransform;
import org.apache.spark.sql.connector.expressions.Expressions;
import org.apache.spark.sql.connector.expressions.FieldReference;
@@ -106,11 +105,11 @@ void testGravitinoToSparkDistributionWithoutSortOrder(boolean supportsBucketPart
if (!supportsBucketPartition) {
// range and even distribution
Assertions.assertThrowsExactly(
NotSupportedException.class,
UnsupportedOperationException.class,
() -> sparkTransformConverter.toSparkTransform(null, Distributions.RANGE, null));
Distribution evenDistribution = Distributions.even(bucketNum, NamedReference.field(""));
Assertions.assertThrowsExactly(
NotSupportedException.class,
UnsupportedOperationException.class,
() -> sparkTransformConverter.toSparkTransform(null, evenDistribution, null));
}
}
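Note that Assertions.assertThrowsExactly passes only when the thrown class matches exactly (a subclass would fail the assertion), so these expectations have to change in lockstep with the production code. A minimal sketch with an illustrative test class:

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class ExceptionTypeDemo {
  @Test
  void exactTypeIsRequired() {
    // Passes because the thrown class is exactly UnsupportedOperationException.
    Assertions.assertThrowsExactly(
        UnsupportedOperationException.class,
        () -> {
          throw new UnsupportedOperationException("example");
        });
  }
}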
@@ -9,7 +9,6 @@
import com.datastrato.gravitino.spark.connector.PropertiesConverter;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import javax.ws.rs.NotSupportedException;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;
import org.junit.jupiter.api.Assertions;
@@ -31,7 +30,7 @@ void testTableFormat() {
Assertions.assertEquals(
"PARQUET", hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT));
Assertions.assertThrowsExactly(
NotSupportedException.class,
UnsupportedOperationException.class,
() ->
hivePropertiesConverter.toGravitinoTableProperties(
ImmutableMap.of(HivePropertiesConstants.SPARK_HIVE_STORED_AS, "notExists")));
@@ -49,7 +48,7 @@
HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_TEXTFILE,
hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT));
Assertions.assertThrowsExactly(
NotSupportedException.class,
UnsupportedOperationException.class,
() ->
hivePropertiesConverter.toGravitinoTableProperties(
ImmutableMap.of(TableCatalog.PROP_PROVIDER, "notExists")));
@@ -15,7 +15,6 @@
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import javax.ws.rs.NotSupportedException;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.connector.catalog.SupportsMetadataColumns;
@@ -88,7 +87,7 @@ void addPartition(Transform partition) {
|| (partition instanceof ApplyTransform && "truncate".equalsIgnoreCase(partition.name()))) {
this.partitions.add(partition);
} else {
throw new NotSupportedException("Doesn't support " + partition.name());
throw new UnsupportedOperationException("Doesn't support " + partition.name());
}
}

@@ -133,7 +132,7 @@ static SparkTableInfo create(Table baseTable) {
&& "truncate".equalsIgnoreCase(transform.name()))) {
sparkTableInfo.addPartition(transform);
} else {
throw new NotSupportedException(
throw new UnsupportedOperationException(
"Doesn't support Spark transform: " + transform.name());
}
});