From 16b4eba1dbba622dc79476f346677bf91e3caff4 Mon Sep 17 00:00:00 2001
From: Cheng Pan
Date: Sun, 8 Feb 2026 22:53:41 +0800
Subject: [PATCH] [SPARK-55428][BUILD] Sync Netty JAVA_OPTS everywhere

---
 R/run-tests.sh                             | 4 ++--
 sql/catalyst/pom.xml                       | 2 +-
 sql/connect/bin/spark-connect-scala-client | 2 ++
 sql/core/pom.xml                           | 2 +-
 4 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/R/run-tests.sh b/R/run-tests.sh
index 3a90b44c2b659..59186fd3a74f7 100755
--- a/R/run-tests.sh
+++ b/R/run-tests.sh
@@ -30,9 +30,9 @@ if [[ $(echo $SPARK_AVRO_JAR_PATH | wc -l) -eq 1 ]]; then
 fi
 
 if [ -z "$SPARK_JARS" ]; then
-  SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" --conf spark.executor.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
+  SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Xss4M" --conf spark.executor.extraJavaOptions="-Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
 else
-  SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --jars $SPARK_JARS --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" --conf spark.executor.extraJavaOptions="-Dio.netty.tryReflectionSetAccessible=true -Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
+  SPARKR_SUPPRESS_DEPRECATION_WARNING=1 SPARK_TESTING=1 NOT_CRAN=true $FWDIR/../bin/spark-submit --jars $SPARK_JARS --driver-java-options "-Dlog4j.configurationFile=file:$FWDIR/log4j2.properties" --conf spark.hadoop.fs.defaultFS="file:///" --conf spark.driver.extraJavaOptions="-Xss4M" --conf spark.executor.extraJavaOptions="-Xss4M" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE
 fi
 
 FAILED=$((PIPESTATUS[0]||$FAILED))
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index 9b7fb89ddd9e4..fc4ed86bcabb5 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -173,7 +173,7 @@
       <plugin>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest-maven-plugin</artifactId>
         <configuration>
-          <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs} -Dio.netty.tryReflectionSetAccessible=true</argLine>
+          <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs}</argLine>
         </configuration>
       </plugin>
diff --git a/sql/connect/bin/spark-connect-scala-client b/sql/connect/bin/spark-connect-scala-client
index 4d508e626df73..7f90353cc39b7 100755
--- a/sql/connect/bin/spark-connect-scala-client
+++ b/sql/connect/bin/spark-connect-scala-client
@@ -71,6 +71,8 @@ JVM_ARGS="-XX:+IgnoreUnrecognizedVMOptions \
   --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
   -Djdk.reflect.useDirectMethodHandle=false \
   -Dio.netty.tryReflectionSetAccessible=true \
+  -Dio.netty.allocator.type=pooled \
+  -Dio.netty.handler.ssl.defaultEndpointVerificationAlgorithm=NONE \
   --enable-native-access=ALL-UNNAMED \
   $SCJVM_ARGS"
 
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index ab6a8f8182e64..ce6e7dcdda6cc 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -344,7 +344,7 @@
       <plugin>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest-maven-plugin</artifactId>
         <configuration>
-          <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs} -Dio.netty.tryReflectionSetAccessible=true</argLine>
+          <argLine>-ea -Xmx4g -Xss4m -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs}</argLine>
         </configuration>
       </plugin>