-
Notifications
You must be signed in to change notification settings - Fork 13.8k
[FLINK-14599][table-planner-blink] Support precision of TimestampType in blink planner #10105
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
f0192dc
90df5e4
22fa480
88082bb
e6d75bc
0ec0b46
d44a523
a7ff4db
7cc6cf4
886eac4
a207224
77d1789
4a83ff6
1e4b5f4
f79164e
e76461e
89e2ab2
d3b1bc9
287c4a9
52cad96
11222cd
8860597
dfeca05
d17efb5
c3412c4
6b82048
aab2160
ee2854a
212e234
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -37,6 +37,7 @@ | |
import org.apache.flink.table.planner.expressions.converter.CallExpressionConvertRule.ConvertContext; | ||
import org.apache.flink.table.types.logical.DecimalType; | ||
import org.apache.flink.table.types.logical.LogicalType; | ||
import org.apache.flink.table.types.logical.TimestampType; | ||
|
||
import org.apache.calcite.avatica.util.DateTimeUtils; | ||
import org.apache.calcite.avatica.util.TimeUnit; | ||
|
@@ -54,6 +55,8 @@ | |
import org.apache.calcite.util.TimestampWithTimeZoneString; | ||
|
||
import java.math.BigDecimal; | ||
import java.sql.Timestamp; | ||
import java.time.LocalDateTime; | ||
import java.util.Arrays; | ||
import java.util.Calendar; | ||
import java.util.Date; | ||
|
@@ -135,8 +138,26 @@ public RexNode visit(ValueLiteralExpression valueLiteral) { | |
return relBuilder.getRexBuilder().makeTimeLiteral(TimeString.fromCalendarFields( | ||
valueAsCalendar(extractValue(valueLiteral, java.sql.Time.class))), 0); | ||
case TIMESTAMP_WITHOUT_TIME_ZONE: | ||
return relBuilder.getRexBuilder().makeTimestampLiteral(TimestampString.fromCalendarFields( | ||
valueAsCalendar(extractValue(valueLiteral, java.sql.Timestamp.class))), 3); | ||
TimestampType timestampType = (TimestampType) type; | ||
Class<?> clazz = valueLiteral.getOutputDataType().getConversionClass(); | ||
LocalDateTime datetime = null; | ||
if (clazz == LocalDateTime.class) { | ||
datetime = valueLiteral.getValueAs(LocalDateTime.class) | ||
.orElseThrow(() -> new TableException("Invalid literal.")); | ||
} else if (clazz == Timestamp.class) { | ||
datetime = valueLiteral.getValueAs(Timestamp.class) | ||
.orElseThrow(() -> new TableException("Invalid literal.")).toLocalDateTime(); | ||
} else { | ||
throw new TableException(String.format("Invalid literal of %s.", clazz.getCanonicalName())); | ||
} | ||
return relBuilder.getRexBuilder().makeTimestampLiteral( | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. We could have util in |
||
new TimestampString( | ||
datetime.getYear(), | ||
datetime.getMonthValue(), | ||
datetime.getDayOfMonth(), | ||
datetime.getHour(), | ||
datetime.getMinute(), | ||
datetime.getSecond()).withNanos(datetime.getNano()), timestampType.getPrecision()); | ||
case TIMESTAMP_WITH_LOCAL_TIME_ZONE: | ||
TimeZone timeZone = TimeZone.getTimeZone(((FlinkContext) ((FlinkRelBuilder) this.relBuilder) | ||
.getCluster().getPlanner().getContext()).getTableConfig().getLocalTimeZone()); | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -19,8 +19,7 @@ | |
package org.apache.flink.table.planner.calcite | ||
|
||
import org.apache.flink.table.runtime.typeutils.TypeCheckUtils | ||
import org.apache.flink.table.types.logical.{DecimalType, LogicalType} | ||
|
||
import org.apache.flink.table.types.logical.{DecimalType, LogicalType, TimestampType} | ||
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeFactory, RelDataTypeSystemImpl} | ||
import org.apache.calcite.sql.`type`.{SqlTypeName, SqlTypeUtil} | ||
|
||
|
@@ -41,8 +40,12 @@ class FlinkTypeSystem extends RelDataTypeSystemImpl { | |
case SqlTypeName.VARCHAR | SqlTypeName.VARBINARY => | ||
Int.MaxValue | ||
|
||
// by default we support timestamp with microseconds precision | ||
case SqlTypeName.TIMESTAMP => | ||
TimestampType.DEFAULT_PRECISION | ||
|
||
// we currently support only timestamps with local time zone with milliseconds precision | ||
case SqlTypeName.TIMESTAMP | SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE => | ||
case SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE => | ||
3 | ||
|
||
case _ => | ||
|
@@ -53,6 +56,10 @@ class FlinkTypeSystem extends RelDataTypeSystemImpl { | |
case SqlTypeName.VARCHAR | SqlTypeName.CHAR | SqlTypeName.VARBINARY | SqlTypeName.BINARY => | ||
Int.MaxValue | ||
|
||
// Calcite's default maximal precision of TIMESTAMP is 3; override it to 9 to support nanoseconds precision | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. What is this "maximal precision of TIMESTAMP is 3" mean? Is that a typo? |
||
case SqlTypeName.TIMESTAMP => | ||
TimestampType.MAX_PRECISION | ||
|
||
case _ => | ||
super.getMaxPrecision(typeName) | ||
} | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -23,22 +23,22 @@ import org.apache.flink.configuration.Configuration | |
import org.apache.flink.metrics.MetricGroup | ||
import org.apache.flink.table.api.{TableConfig, TableException} | ||
import org.apache.flink.table.dataformat.BinaryStringUtil.safeToString | ||
import org.apache.flink.table.dataformat.{BinaryString, Decimal, GenericRow} | ||
import org.apache.flink.table.dataformat.{BinaryString, Decimal, GenericRow, SqlTimestamp} | ||
import org.apache.flink.table.functions.{FunctionContext, UserDefinedFunction} | ||
import org.apache.flink.table.planner.calcite.FlinkTypeFactory | ||
import org.apache.flink.table.planner.codegen.FunctionCodeGenerator.generateFunction | ||
import org.apache.flink.table.planner.plan.utils.PythonUtil.containsPythonCall | ||
import org.apache.flink.table.runtime.functions.SqlDateTimeUtils | ||
import org.apache.flink.table.types.logical.RowType | ||
|
||
import org.apache.calcite.avatica.util.ByteString | ||
import org.apache.calcite.rex.{RexBuilder, RexExecutor, RexNode} | ||
import org.apache.calcite.sql.`type`.SqlTypeName | ||
import org.apache.calcite.util.TimestampString | ||
import org.apache.commons.lang3.StringEscapeUtils | ||
|
||
import java.io.File | ||
import java.util.TimeZone | ||
|
||
|
||
import scala.collection.JavaConverters._ | ||
import scala.collection.mutable.ListBuffer | ||
|
||
|
@@ -172,6 +172,19 @@ class ExpressionReducer( | |
} | ||
reducedValues.add(maySkipNullLiteralReduce(rexBuilder, value, unreduced)) | ||
reducedIdx += 1 | ||
case SqlTypeName.TIMESTAMP => | ||
val reducedValue = reduced.getField(reducedIdx) | ||
val value = if (reducedValue != null) { | ||
val ts = reducedValue.asInstanceOf[SqlTimestamp] | ||
val milliseconds = ts.getMillisecond | ||
val nanoseconds = ts.toLocalDateTime.getNano | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Why not just use There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. A little tricky here, take The [1] step considers Gregorian cutovers and again the [2] step uses I will change the There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think you can fix it, and it is very worthwhile to fix. Otherwise it will bring a lot of strange code. |
||
val timestampString = TimestampString.fromMillisSinceEpoch(milliseconds) | ||
timestampString.withNanos(nanoseconds) | ||
} else { | ||
reducedValue | ||
} | ||
reducedValues.add(maySkipNullLiteralReduce(rexBuilder, value, unreduced)) | ||
reducedIdx += 1 | ||
case _ => | ||
val reducedValue = reduced.getField(reducedIdx) | ||
// RexBuilder handle double literal incorrectly, convert it into BigDecimal manually | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
extractValue
.valueLiteral.getValueAs
.There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We can just use
valueLiteral.getValueAs(LocalDateTime.class)
.