New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[SPARK-22100] [SQL] Make percentile_approx support date/timestamp type and change the output type to be the same as input type #19321
Changes from 4 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -85,7 +85,8 @@ case class ApproximatePercentile( | |
private lazy val accuracy: Int = accuracyExpression.eval().asInstanceOf[Int] | ||
|
||
override def inputTypes: Seq[AbstractDataType] = { | ||
Seq(DoubleType, TypeCollection(DoubleType, ArrayType(DoubleType)), IntegerType) | ||
Seq(TypeCollection(NumericType, DateType, TimestampType), | ||
TypeCollection(DoubleType, ArrayType(DoubleType)), IntegerType) | ||
} | ||
|
||
// Mark as lazy so that percentageExpression is not evaluated during tree transformation. | ||
|
@@ -123,7 +124,13 @@ case class ApproximatePercentile( | |
val value = child.eval(inputRow) | ||
// Ignore empty rows, for example: percentile_approx(null) | ||
if (value != null) { | ||
buffer.add(value.asInstanceOf[Double]) | ||
// Convert the value to a double value | ||
val doubleValue = child.dataType match { | ||
case DateType => value.asInstanceOf[Int].toDouble | ||
case TimestampType => value.asInstanceOf[Long].toDouble | ||
case n: NumericType => n.numeric.toDouble(value.asInstanceOf[n.InternalType]) | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Reviewer comment: The same here. |
||
} | ||
buffer.add(doubleValue) | ||
} | ||
buffer | ||
} | ||
|
@@ -134,7 +141,18 @@ case class ApproximatePercentile( | |
} | ||
|
||
override def eval(buffer: PercentileDigest): Any = { | ||
val result = buffer.getPercentiles(percentages) | ||
val doubleResult = buffer.getPercentiles(percentages) | ||
val result = child.dataType match { | ||
case DateType => doubleResult.map(_.toInt) | ||
case TimestampType => doubleResult.map(_.toLong) | ||
case ByteType => doubleResult.map(_.toByte) | ||
case ShortType => doubleResult.map(_.toShort) | ||
case IntegerType => doubleResult.map(_.toInt) | ||
case LongType => doubleResult.map(_.toLong) | ||
case FloatType => doubleResult.map(_.toFloat) | ||
case DoubleType => doubleResult | ||
case _: DecimalType => doubleResult.map(Decimal(_)) | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Reviewer suggestion: add a fallback case to the match, e.g. `case other: DataType =>
throw new UnsupportedOperationException(s"Unexpected data type $other")` |
||
} | ||
if (result.length == 0) { | ||
null | ||
} else if (returnPercentileArray) { | ||
|
@@ -156,7 +174,7 @@ case class ApproximatePercentile( | |
override def nullable: Boolean = true | ||
|
||
override def dataType: DataType = { | ||
if (returnPercentileArray) ArrayType(DoubleType, false) else DoubleType | ||
if (returnPercentileArray) ArrayType(child.dataType, false) else child.dataType | ||
} | ||
|
||
override def prettyName: String = "percentile_approx" | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.expressions.aggregate | |
|
||
import org.apache.spark.SparkFunSuite | ||
import org.apache.spark.sql.catalyst.InternalRow | ||
import org.apache.spark.sql.catalyst.analysis.{SimpleAnalyzer, UnresolvedAttribute} | ||
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure | ||
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute | ||
import org.apache.spark.sql.catalyst.dsl.expressions._ | ||
import org.apache.spark.sql.catalyst.dsl.plans._ | ||
import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, BoundReference, Cast, CreateArray, DecimalLiteral, GenericInternalRow, Literal} | ||
|
@@ -270,7 +270,6 @@ class ApproximatePercentileSuite extends SparkFunSuite { | |
percentageExpression = percentageExpression, | ||
accuracyExpression = Literal(100)) | ||
|
||
val result = wrongPercentage.checkInputDataTypes() | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Reviewer comment: This is duplicated by line 274. |
||
assert( | ||
wrongPercentage.checkInputDataTypes() match { | ||
case TypeCheckFailure(msg) if msg.contains("must be between 0.0 and 1.0") => true | ||
|
@@ -281,7 +280,6 @@ class ApproximatePercentileSuite extends SparkFunSuite { | |
|
||
test("class ApproximatePercentile, automatically add type casting for parameters") { | ||
val testRelation = LocalRelation('a.int) | ||
val analyzer = SimpleAnalyzer | ||
|
||
// Compatible accuracy types: Long type and decimal type | ||
val accuracyExpressions = Seq(Literal(1000L), DecimalLiteral(10000), Literal(123.0D)) | ||
|
@@ -299,7 +297,7 @@ class ApproximatePercentileSuite extends SparkFunSuite { | |
analyzed match { | ||
case Alias(agg: ApproximatePercentile, _) => | ||
assert(agg.resolved) | ||
assert(agg.child.dataType == DoubleType) | ||
assert(agg.child.dataType == IntegerType) | ||
assert(agg.percentageExpression.dataType == DoubleType || | ||
agg.percentageExpression.dataType == ArrayType(DoubleType, containsNull = false)) | ||
assert(agg.accuracyExpression.dataType == IntegerType) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Reviewer comment: This will change the function's result type (and thus query results) compared to the previous behavior; we need to document it.