FilterPathsBasedOnPredicateTest.scala
package io.github.saurabh975.layers.util

import io.github.saurabh975.layers.common.Predicate.{<, _}
import io.github.saurabh975.layers.base.BaseTest
import io.github.saurabh975.layers.common.{Column, Predicate}
import io.github.saurabh975.layers.exceptions.PartitionColumnNotPresent
import io.github.saurabh975.layers.reader.ORCReader
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.functions._

import java.io.{File, FileNotFoundException}
import scala.reflect.io.Directory
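
// Tests for FilterPathsBasedOnPredicate.filter: given base paths and per-column
// predicates, it should return only the partition directories whose path values
// satisfy every predicate. `spark`, `dir` and `testingPaths` are assumed to be
// provided by BaseTest; `testingPaths` presumably lays out a partitioned .orc
// directory tree under the testing directory.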
class FilterPathsBasedOnPredicateTest extends BaseTest {

  val testingDirectory = new Directory(new File(dir.path.concat("/testingDirectory")))
  val basePath = new Path(testingDirectory.path)

  override def beforeAll(): Unit = {
    testingDirectory.createDirectory()
    testingPaths(testingDirectory.path, ".orc")
  }

  override def afterAll(): Unit = {
    testingDirectory.deleteRecursively()
  }
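
  // Each positive test follows the same pattern: build predicates, prune the
  // partition paths with FilterPathsBasedOnPredicate.filter, then check that
  // reading only the pruned paths yields the same rows and columns as applying
  // the equivalent DataFrame filters to a full read of the base path.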
"filter" should "return list of paths after applying predicates" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("string_partn_col") -> List(in("val1", "val2")),
Column("double_partn_col") -> List(>(123.0), <(235.0)),
Column("float_partn_col") -> List(between(1.0F, 4.0F)),
Column("int_partn_col") -> List(equal(234)),
Column("long_partn_col") -> List(>=(2023110518230000L), <=(2023110518250000L))
)
val filteredPaths: List[Path] = FilterPathsBasedOnPredicate.filter(spark, List(basePath), predicates)
val dataWithBasePath = ORCReader.read(spark, basePath.toString)
.filter(col("string_partn_col").isin("val1", "val2")
&& col("double_partn_col") > 123.0
&& col("double_partn_col") < 235.0
&& col("float_partn_col").between(1.0F, 4.0F)
&& col("int_partn_col") === 234
&& col("long_partn_col") >= 2023110518230000L && col("long_partn_col") <= 2023110518250000L)
val dataWithFilteredPaths = ORCReader.read(spark,
Map("basePath" -> basePath.toString),
filteredPaths.map(_.toString): _*)
assert(dataWithBasePath.count() == dataWithFilteredPaths.count())
assert(dataWithBasePath.columns.sorted sameElements dataWithFilteredPaths.columns.sorted)
assert(filteredPaths.length == 48)
}
it should "return list of paths is some partition columns are skipped" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("double_partn_col") -> List(>=(123.0), <=(235.0)),
Column("float_partn_col") -> List(between(1.0F, 4.0F)),
Column("long_partn_col") -> List(>(2023110518230000L), <(2023110518250000L))
)
val filteredPaths: List[Path] = FilterPathsBasedOnPredicate.filter(spark, List(basePath), predicates)
val dataWithBasePath = ORCReader.read(spark, basePath.toString)
.filter(col("double_partn_col") >= 123.0
&& col("double_partn_col") <= 235.0
&& col("float_partn_col").between(1.0F, 4.0F)
&& col("long_partn_col") > 2023110518230000L && col("long_partn_col") < 2023110518250000L)
val dataWithFilteredPaths = ORCReader.read(spark,
Map("basePath" -> basePath.toString),
filteredPaths.map(_.toString): _*)
assert(dataWithBasePath.count() == dataWithFilteredPaths.count())
assert(dataWithBasePath.columns.sorted sameElements dataWithFilteredPaths.columns.sorted)
assert(filteredPaths.length == 54)
}
it should "throw an error if some column was not present as partition column" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("random_partn_col") -> List(>=(123.0), <=(235.0)),
Column("float_partn_col") -> List(between(1.0F, 4.0F)),
Column("long_partn_col") -> List(>(2023110518230000L), <(2023110518250000L))
)
assertThrows[PartitionColumnNotPresent](FilterPathsBasedOnPredicate.filter(spark, List(basePath), predicates))
}
it should "throw error" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("string_partn_col") -> List(equal("val1")),
Column("double_partn_col") -> List(notEqual(123.5), in(234.6, 345.7)),
Column("float_partn_col") -> List(in(1.0F, 2.1F, 3.2F)),
Column("int_partn_col") -> List(in(234)),
Column("long_partn_col") -> List(in(2023110518240000L, 2023110718240000L))
)
assertThrows[FileNotFoundException](
FilterPathsBasedOnPredicate.filter(spark, List(new Path("basePath")), predicates))
}
it should "return list of paths with all in predicates" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("string_partn_col") -> List(equal("val1")),
Column("double_partn_col") -> List(notEqual(123.5), in(234.6, 345.7)),
Column("float_partn_col") -> List(in(1.0F, 2.1F, 3.2F)),
Column("int_partn_col") -> List(in(234)),
Column("long_partn_col") -> List(in(2023110518240000L, 2023110718240000L))
)
val filteredPaths: List[Path] = FilterPathsBasedOnPredicate.filter(spark, List(basePath), predicates)
val dataWithBasePath = ORCReader.read(spark, basePath.toString)
.filter(col("string_partn_col") === "val1"
&& col("double_partn_col") =!= 123.5
&& col("double_partn_col").isin(234.6, 345.7)
&& col("float_partn_col").isin(1.0, 2.1, 3.2)
&& col("int_partn_col").isin(234)
&& col("long_partn_col").isin(2023110518240000L, 2023110718240000L))
val dataWithFilteredPaths = ORCReader.read(spark,
Map("basePath" -> testingDirectory.path),
filteredPaths.map(_.toString): _*)
assert(dataWithBasePath.count() == dataWithFilteredPaths.count())
assert(dataWithBasePath.columns.sorted sameElements dataWithFilteredPaths.columns.sorted)
assert(filteredPaths.length == 24)
}
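
  // Unlike the tests above, this one only asserts the number of pruned paths;
  // no data read-back comparison is done.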
it should "return list of paths with < and <=, >, >= and == predicates" in {
val predicates: Map[Column, List[Predicate]] = Map(
Column("string_partn_col") -> List(equal("val1")),
Column("double_partn_col") -> List(notEqual(123.5), in(234.6, 345.7)),
Column("float_partn_col") -> List(<(3.4F), >(0.9F), equal(3.2F)),
Column("int_partn_col") -> List(<(235), >(121) ,<=(234), >=(122)),
Column("long_partn_col") -> List(equal(2023110518240000L))
)
val filteredPaths: List[Path] = FilterPathsBasedOnPredicate.filter(spark, List(basePath), predicates)
assert(filteredPaths.length == 8)
}
}