/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.trees.TreePattern.{TIME_WINDOW, TreePattern}
import org.apache.spark.sql.catalyst.util.DateTimeConstants.MICROS_PER_DAY
import org.apache.spark.sql.catalyst.util.IntervalUtils
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

case class TimeWindow(
timeColumn: Expression,
windowDuration: Long,
slideDuration: Long,
startTime: Long) extends UnaryExpression
with ImplicitCastInputTypes
with Unevaluable
with NonSQLExpression {
//////////////////////////
// SQL Constructors
//////////////////////////
def this(
timeColumn: Expression,
windowDuration: Expression,
slideDuration: Expression,
startTime: Expression) = {
this(timeColumn, TimeWindow.parseExpression(windowDuration),
TimeWindow.parseExpression(slideDuration), TimeWindow.parseExpression(startTime))
}
def this(timeColumn: Expression, windowDuration: Expression, slideDuration: Expression) = {
this(timeColumn, TimeWindow.parseExpression(windowDuration),
TimeWindow.parseExpression(slideDuration), 0)
}
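
  // With only a window duration, the slide defaults to the window itself,
  // producing contiguous, non-overlapping ("tumbling") windows.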
def this(timeColumn: Expression, windowDuration: Expression) = {
this(timeColumn, windowDuration, windowDuration)
}
override def child: Expression = timeColumn
override def inputTypes: Seq[AbstractDataType] = Seq(AnyTimestampType)
override def dataType: DataType = new StructType()
.add(StructField("start", child.dataType))
.add(StructField("end", child.dataType))
override def prettyName: String = "window"
final override val nodePatterns: Seq[TreePattern] = Seq(TIME_WINDOW)
// This expression is replaced in the analyzer.
override lazy val resolved = false
/**
* Validate the inputs for the window duration, slide duration, and start time in addition to
* the input data type.
*/
override def checkInputDataTypes(): TypeCheckResult = {
val dataTypeCheck = super.checkInputDataTypes()
if (dataTypeCheck.isSuccess) {
if (windowDuration <= 0) {
return TypeCheckFailure(s"The window duration ($windowDuration) must be greater than 0.")
}
if (slideDuration <= 0) {
return TypeCheckFailure(s"The slide duration ($slideDuration) must be greater than 0.")
}
      if (slideDuration > windowDuration) {
        return TypeCheckFailure(s"The slide duration ($slideDuration) must be less than or equal" +
          s" to the window duration ($windowDuration).")
      }
      if (startTime.abs >= slideDuration) {
        return TypeCheckFailure(s"The absolute value of start time ($startTime) must be less " +
          s"than the slide duration ($slideDuration).")
      }
}
dataTypeCheck
}
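
  // For example (illustrative values): window(ts, "5 minutes", "10 minutes")
  // fails the third check above, since a slide longer than the window would
  // leave gaps of rows that belong to no window.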
override protected def withNewChildInternal(newChild: Expression): TimeWindow =
copy(timeColumn = newChild)
}
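
// A minimal usage sketch (the table `events` and column `ts` are illustrative,
// not part of this file). The SQL constructors above back the `window` SQL
// function:
//
//   SELECT window(ts, '10 minutes', '5 minutes'), count(*)
//   FROM events
//   GROUP BY window(ts, '10 minutes', '5 minutes')
//
// The analyzer replaces this unevaluable expression with the actual windowing
// plan, which is why `resolved` is hard-wired to false above.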
object TimeWindow {
/**
   * Parses the interval string for a valid time duration. The string may
   * optionally start with the keyword `interval`;
   * `IntervalUtils.stringToInterval` accepts it with or without that prefix.
   *
   * @param interval The interval string
   * @return The interval duration in microseconds. Spark SQL's TimestampType
   *         has microsecond precision.
*/
def getIntervalInMicroSeconds(interval: String): Long = {
val cal = IntervalUtils.stringToInterval(UTF8String.fromString(interval))
    if (cal.months != 0) {
      throw new IllegalArgumentException(
        s"Intervals greater than a month are not supported ($interval).")
    }
cal.days * MICROS_PER_DAY + cal.microseconds
}
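
  // For example (illustrative values):
  //   getIntervalInMicroSeconds("10 seconds")     == 10000000L
  //   getIntervalInMicroSeconds("interval 1 day") == 86400000000L
  //   getIntervalInMicroSeconds("1 month")        // throws IllegalArgumentException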
/**
* Parses the duration expression to generate the long value for the original constructor so
* that we can use `window` in SQL.
*/
def parseExpression(expr: Expression): Long = expr match {
case NonNullLiteral(s, StringType) => getIntervalInMicroSeconds(s.toString)
case IntegerLiteral(i) => i.toLong
case NonNullLiteral(l, LongType) => l.toString.toLong
case _ => throw QueryCompilationErrors.invalidLiteralForWindowDurationError()
}
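
  // Accepted literal forms (illustrative values):
  //   parseExpression(Literal("10 seconds")) => 10000000L  (interval string)
  //   parseExpression(Literal(10))           => 10L        (integer, taken as microseconds)
  //   parseExpression(Literal(10L))          => 10L        (long, taken as microseconds)
  // Anything else raises invalidLiteralForWindowDurationError.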
def apply(
timeColumn: Expression,
windowDuration: String,
slideDuration: String,
startTime: String): TimeWindow = {
TimeWindow(timeColumn,
getIntervalInMicroSeconds(windowDuration),
getIntervalInMicroSeconds(slideDuration),
getIntervalInMicroSeconds(startTime))
}
}
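
// The string-based apply above also backs the DataFrame API, e.g. (sketch,
// assuming a DataFrame `df` with a timestamp column `ts`):
//
//   import org.apache.spark.sql.functions.window
//   df.groupBy(window(df("ts"), "1 hour", "30 minutes")).count()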
/**
* Expression used internally to convert the TimestampType to Long and back without losing
* precision, i.e. in microseconds. Used in time windowing.
*/
case class PreciseTimestampConversion(
child: Expression,
fromType: DataType,
toType: DataType) extends UnaryExpression with ExpectsInputTypes with NullIntolerant {
override def inputTypes: Seq[AbstractDataType] = Seq(fromType)
override def dataType: DataType = toType
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = child.genCode(ctx)
ev.copy(code = eval.code +
code"""boolean ${ev.isNull} = ${eval.isNull};
|${CodeGenerator.javaType(dataType)} ${ev.value} = ${eval.value};
""".stripMargin)
}
override def nullSafeEval(input: Any): Any = input
override protected def withNewChildInternal(newChild: Expression): PreciseTimestampConversion =
copy(child = newChild)
}
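
// Note: at the value level this conversion is a no-op. Both TimestampType and
// LongType are physically backed by a Long holding microseconds, so doGenCode
// simply forwards the child's value; the expression only changes the reported
// data type so that window arithmetic can run on plain Longs.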