[SPARK-34118][CORE][SQL][2.4] Replaces filter and check for emptiness with exists or forall

### What changes were proposed in this pull request?
This PR uses `exists` or `forall` to simplify the `filter` + emptiness-check pattern; the result is semantically equivalent but simpler. The rules are as follows (a sketch checking them follows the list):

- `seq.filter(p).size == 0` -> `!seq.exists(p)`
- `seq.filter(p).length > 0` -> `seq.exists(p)`
- `seq.filterNot(p).isEmpty` -> `seq.forall(p)`
- `seq.filterNot(p).nonEmpty` -> `!seq.forall(p)`
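
A minimal REPL-style sketch checking that each rewritten form agrees with the original (the sample `seq` and predicate `p` are made up for illustration, not taken from the patch):

```scala
// Toy data, for illustration only.
val seq = Seq(1, 2, 3, 4)
val p: Int => Boolean = _ % 2 == 0

// Each pair is semantically equivalent.
assert((seq.filter(p).size == 0) == !seq.exists(p))
assert((seq.filter(p).length > 0) == seq.exists(p))
assert(seq.filterNot(p).isEmpty == seq.forall(p))
assert(seq.filterNot(p).nonEmpty == !seq.forall(p))
```

Besides reading better, `exists` and `forall` stop at the first deciding element and never materialize the intermediate filtered collection.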

### Why are the changes needed?
Code simplification.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
Pass the Jenkins or GitHub Actions builds.

Closes #31192 from LuciferYang/SPARK-34118-24.

Authored-by: yangjie01 <yangjie01@baidu.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
LuciferYang authored and HyukjinKwon committed Jan 15, 2021
1 parent 63e93a5 commit 7ae6c8d
Showing 4 changed files with 6 additions and 6 deletions.
core/src/main/scala/org/apache/spark/api/r/RUtils.scala (4 changes: 2 additions & 2 deletions)
@@ -43,9 +43,9 @@ private[spark] object RUtils {
    * Check if SparkR is installed before running tests that use SparkR.
    */
   def isSparkRInstalled: Boolean = {
-    localSparkRPackagePath.filter { pkgDir =>
+    localSparkRPackagePath.exists { pkgDir =>
       new File(Seq(pkgDir, "SparkR").mkString(File.separator)).exists
-    }.isDefined
+    }
   }
 
   /**
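
Note that the hunk above applies the same idea to an `Option` rather than a `Seq`: `opt.filter(p).isDefined` is equivalent to `opt.exists(p)`. A minimal sketch, with a made-up path standing in for `localSparkRPackagePath`:

```scala
// Hypothetical value; illustration only.
val pkgPath: Option[String] = Some("/opt/spark/R/lib")

// filter + isDefined allocates an intermediate Option; exists does not.
assert(pkgPath.filter(_.startsWith("/opt")).isDefined ==
  pkgPath.exists(_.startsWith("/opt")))
assert(!Option.empty[String].exists(_.startsWith("/opt")))
```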
@@ -338,7 +338,7 @@ class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter with Logging {
     assert(generatedFiles.size > 1)
     if (isCompressed) {
       assert(
-        generatedFiles.filter(_.getName.endsWith(RollingFileAppender.GZIP_LOG_SUFFIX)).size > 0)
+        generatedFiles.exists(_.getName.endsWith(RollingFileAppender.GZIP_LOG_SUFFIX)))
     }
     val allText = generatedFiles.map { file =>
       if (file.getName.endsWith(RollingFileAppender.GZIP_LOG_SUFFIX)) {
@@ -694,7 +694,7 @@ class JDBCSuite extends QueryTest
   test("Remap types via JdbcDialects") {
     JdbcDialects.registerDialect(testH2Dialect)
     val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
-    assert(df.schema.filter(_.dataType != org.apache.spark.sql.types.StringType).isEmpty)
+    assert(!df.schema.exists(_.dataType != org.apache.spark.sql.types.StringType))
     val rows = df.collect()
     assert(rows(0).get(0).isInstanceOf[String])
     assert(rows(0).get(1).isInstanceOf[String])
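
By De Morgan's law, `!seq.exists(x => !q(x))` is the same as `seq.forall(q)`, so the assertion above could equally have been written with `forall`. A sketch with toy stand-in data (the real test inspects each field's `dataType`):

```scala
// Toy stand-in for the schema's column types; illustration only.
val columnTypes = Seq("string", "string", "string")
assert(!columnTypes.exists(_ != "string") == columnTypes.forall(_ == "string"))
```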
@@ -36,8 +36,8 @@ class DataSourceV2UtilsSuite extends SparkFunSuite {
     val cs = classOf[DataSourceV2WithSessionConfig].newInstance()
     val confs = DataSourceV2Utils.extractSessionConfigs(cs.asInstanceOf[DataSourceV2], conf)
     assert(confs.size == 2)
-    assert(confs.keySet.filter(_.startsWith("spark.datasource")).size == 0)
-    assert(confs.keySet.filter(_.startsWith("not.exist.prefix")).size == 0)
+    assert(!confs.keySet.exists(_.startsWith("spark.datasource")))
+    assert(!confs.keySet.exists(_.startsWith("not.exist.prefix")))
     assert(confs.keySet.contains("foo.bar"))
     assert(confs.keySet.contains("whateverConfigName"))
   }
