From 288a2928b30c66910a40e9d725d2399bb65447da Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Thu, 7 Aug 2025 10:51:19 -0700
Subject: [PATCH] [SPARK-53179][CORE][TESTS] Use `SparkStreamUtils.toString`
 instead of `CharStreams.toString`

---
 .../ExternalShuffleBlockResolverSuite.java      | 15 ++++-----------
 .../deploy/ExternalShuffleServiceDbSuite.scala  |  5 +----
 .../netty/NettyBlockTransferSecuritySuite.scala |  8 ++------
 dev/checkstyle.xml                              |  4 ++++
 scalastyle-config.xml                           |  5 +++++
 5 files changed, 16 insertions(+), 21 deletions(-)

diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolverSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolverSuite.java
index 311827dbed4c5..3e068d4573688 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolverSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolverSuite.java
@@ -19,15 +19,14 @@
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.nio.charset.StandardCharsets;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.io.CharStreams;
 import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo;
 import org.apache.spark.network.util.MapConfigProvider;
 import org.apache.spark.network.util.TransportConf;
 import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId;
+import org.apache.spark.util.SparkStreamUtils$;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -83,23 +82,17 @@ public void testSortShuffleBlocks() throws IOException {
 
     try (InputStream block0Stream = resolver.getBlockData(
         "app0", "exec0", 0, 0, 0).createInputStream()) {
-      String block0 =
-        CharStreams.toString(new InputStreamReader(block0Stream, StandardCharsets.UTF_8));
-      assertEquals(sortBlock0, block0);
+      assertEquals(sortBlock0, SparkStreamUtils$.MODULE$.toString(block0Stream));
     }
 
     try (InputStream block1Stream = resolver.getBlockData(
         "app0", "exec0", 0, 0, 1).createInputStream()) {
-      String block1 =
-        CharStreams.toString(new InputStreamReader(block1Stream, StandardCharsets.UTF_8));
-      assertEquals(sortBlock1, block1);
+      assertEquals(sortBlock1, SparkStreamUtils$.MODULE$.toString(block1Stream));
     }
 
     try (InputStream blocksStream = resolver.getContinuousBlocksData(
         "app0", "exec0", 0, 0, 0, 2).createInputStream()) {
-      String blocks =
-        CharStreams.toString(new InputStreamReader(blocksStream, StandardCharsets.UTF_8));
-      assertEquals(sortBlock0 + sortBlock1, blocks);
+      assertEquals(sortBlock0 + sortBlock1, SparkStreamUtils$.MODULE$.toString(blocksStream));
     }
   }
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceDbSuite.scala b/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceDbSuite.scala
index 921175bd41038..224f47f1d89c7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceDbSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceDbSuite.scala
@@ -17,11 +17,8 @@
 
 package org.apache.spark.deploy
 
-import java.io._
 import java.nio.charset.StandardCharsets
 
-import com.google.common.io.CharStreams
-
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.internal.config._
 import org.apache.spark.network.shuffle.{ExternalBlockHandler, ExternalShuffleBlockResolver}
@@ -110,7 +107,7 @@ abstract class ExternalShuffleServiceDbSuite extends SparkFunSuite {
       blockResolver = blockHandler.getBlockResolver
       val block0Stream = blockResolver.getBlockData("app0", "exec0", 0, 0, 0).createInputStream
-      val block0 = CharStreams.toString(new InputStreamReader(block0Stream, StandardCharsets.UTF_8))
+      val block0 = Utils.toString(block0Stream)
       block0Stream.close()
       assert(sortBlock0 == block0) // pass
 
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 124c089b47108..2733800727c0f 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.network.netty
 
-import java.io.InputStreamReader
 import java.nio._
 import java.nio.charset.StandardCharsets
 import java.util.concurrent.TimeUnit
@@ -26,7 +25,6 @@ import scala.concurrent.Promise
 import scala.concurrent.duration._
 import scala.util.{Failure, Success, Try}
 
-import com.google.common.io.CharStreams
 import org.mockito.Mockito._
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
@@ -40,7 +38,7 @@ import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
 import org.apache.spark.network.shuffle.BlockFetchingListener
 import org.apache.spark.serializer.{JavaSerializer, SerializerManager}
 import org.apache.spark.storage.{BlockId, ShuffleBlockId}
-import org.apache.spark.util.{SslTestUtils, ThreadUtils}
+import org.apache.spark.util.{SslTestUtils, ThreadUtils, Utils}
 
 class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with Matchers {
 
@@ -150,9 +148,7 @@ class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar wi
 
     val result = fetchBlock(exec0, exec1, "1", blockId) match {
       case Success(buf) =>
-        val actualString = CharStreams.toString(
-          new InputStreamReader(buf.createInputStream(), StandardCharsets.UTF_8))
-        actualString should equal(blockString)
+        Utils.toString(buf.createInputStream()) should equal(blockString)
         buf.release()
         Success(())
       case Failure(t) =>
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index 7d34afc99345c..f9f08ed124fc7 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -229,6 +229,10 @@
+    <module name="RegexpSinglelineJava">
+        <property name="format" value="CharStreams\.toString"/>
+        <property name="message" value="Use org.apache.spark.util.SparkStreamUtils.toString instead."/>
+    </module>
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 75794f0bbc1a5..abb771a78d37e 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -722,6 +722,11 @@ This file is divided into 3 sections:
     <customMessage>Use toString of SparkStreamUtils or Utils instead.</customMessage>
   </check>
 
+  <check customId="charstreamstostring" level="error" class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter name="regex">\bCharStreams\.toString\b</parameter></parameters>
+    <customMessage>Use toString of SparkStreamUtils or Utils instead.</customMessage>
+  </check>
+
     <parameters><parameter name="regex">\bIOUtils\.write\b</parameter></parameters>
     <customMessage>Use Java `write` instead.</customMessage>
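
Illustrative aside, not part of the patch itself: a minimal sketch of the call pattern the new checkstyle/scalastyle rules steer code toward. The package and object names below are hypothetical, and the sketch assumes `Utils.toString` drains the `InputStream` and decodes it as UTF-8, which is what the updated tests rely on when comparing the result against plain `String` fixtures.

```scala
package org.apache.spark.example  // hypothetical; Utils is spark-private, so callers live under org.apache.spark

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets

import org.apache.spark.util.Utils

object ToStringSketch {
  def main(args: Array[String]): Unit = {
    // Before: CharStreams.toString(new InputStreamReader(in, StandardCharsets.UTF_8))
    // After:  Utils.toString(in), which reads the stream fully into a UTF-8 String.
    val in = new ByteArrayInputStream("sort block 0".getBytes(StandardCharsets.UTF_8))
    val text = Utils.toString(in)
    assert(text == "sort block 0")
  }
}
```

From Java test code the same helper is reached through the Scala module instance, `SparkStreamUtils$.MODULE$.toString(in)`, as in `ExternalShuffleBlockResolverSuite` above.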