From e7def6478b94c83f4e12969aeda77f63555f1576 Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Sun, 12 Apr 2026 20:59:11 +0300 Subject: [PATCH 1/8] [improvement](fe) Support hour-offset date_add/date_sub in MTMV partition rollup ### What problem does this PR solve? Issue Number: close #62395 Related PR: None Problem Summary: MTMV partition rollup only handled `date_trunc(partition_col, unit)`. It could not track partition lineage when the MV partition expression uses hour offset conversion such as `date_trunc(date_add(k2, INTERVAL 3 HOUR), 'day')` or `date_trunc(date_sub(k2, INTERVAL 3 HOUR), 'day')`, which is needed for UTC-to-local-day partitioning and incremental maintenance. ### Release note Support MTMV partition rollup and partition increment check for `date_trunc(date_add/date_sub(partition_col, INTERVAL N HOUR), unit)`. ### Check List (For Author) - Test: Regression test / Unit Test - Unit Test: `./run-fe-ut.sh --run org.apache.doris.mtmv.MTMVRelatedPartitionDescRollUpGeneratorTest` - Build: `MVN_OPT='-Dmaven.build.cache.enabled=false' ./build.sh --fe --clean -j$(nproc)` - Regression test: `./run-regression-test.sh --run -d mtmv_p0 -s test_rollup_partition_mtmv_date_add` (failed locally because FE/BE test cluster was not started; connection refused) - Behavior changed: Yes (MTMV partition expression now supports hour-offset `date_add/date_sub` lineage under `date_trunc`) - Does this need documentation: No --- .../MTMVPartitionExprDateTruncDateAddSub.java | 299 ++++++++++++++++++ .../doris/mtmv/MTMVPartitionExprFactory.java | 7 + .../mv/PartitionIncrementMaintainer.java | 4 +- .../info/MTMVPartitionDefinition.java | 19 ++ ...latedPartitionDescRollUpGeneratorTest.java | 108 +++++++ ...test_rollup_partition_mtmv_date_add.groovy | 88 ++++++ 6 files changed, 524 insertions(+), 1 deletion(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprDateTruncDateAddSub.java create mode 100644 
regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprDateTruncDateAddSub.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprDateTruncDateAddSub.java new file mode 100644 index 00000000000000..7ef7439e515164 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprDateTruncDateAddSub.java @@ -0,0 +1,299 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.mtmv; + +import org.apache.doris.analysis.Expr; +import org.apache.doris.analysis.FunctionCallExpr; +import org.apache.doris.analysis.LiteralExpr; +import org.apache.doris.analysis.PartitionExprUtil; +import org.apache.doris.analysis.PartitionKeyDesc; +import org.apache.doris.analysis.PartitionValue; +import org.apache.doris.analysis.StringLiteral; +import org.apache.doris.analysis.TimestampArithmeticExpr; +import org.apache.doris.catalog.PartitionType; +import org.apache.doris.catalog.Type; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.util.PropertyAnalyzer; +import org.apache.doris.datasource.mvcc.MvccUtil; +import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.executable.DateTimeArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.executable.DateTimeExtractAndTransform; +import org.apache.doris.nereids.trees.expressions.literal.BigIntLiteral; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeV2Literal; +import org.apache.doris.nereids.trees.expressions.literal.DateV2Literal; +import org.apache.doris.nereids.trees.expressions.literal.VarcharLiteral; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableSet; +import org.apache.commons.lang3.StringUtils; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; + +/** + * Rollup service for expressions: + * date_trunc(date_add/date_sub(partition_col, interval N hour), 'unit') + */ +public class MTMVPartitionExprDateTruncDateAddSub implements MTMVPartitionExprService { + private static final Set TIME_UNITS = ImmutableSet.of( + "year", "quarter", "week", "month", "day", "hour"); + private final String timeUnit; + private final long offsetHours; + + public MTMVPartitionExprDateTruncDateAddSub(FunctionCallExpr 
functionCallExpr) throws AnalysisException { + List paramsExprs = functionCallExpr.getParams().exprs(); + if (paramsExprs.size() != 2) { + throw new AnalysisException("date_trunc params exprs size should be 2."); + } + Expr dateTruncUnit = paramsExprs.get(1); + if (!(dateTruncUnit instanceof StringLiteral)) { + throw new AnalysisException("date_trunc param of time unit is not string literal."); + } + this.timeUnit = dateTruncUnit.getStringValue().toLowerCase(); + + Expr dateTruncArg = paramsExprs.get(0); + if (!(dateTruncArg instanceof TimestampArithmeticExpr)) { + throw new AnalysisException("date_trunc first argument should be date_add/date_sub in mtmv partition"); + } + TimestampArithmeticExpr timestampArithmeticExpr = (TimestampArithmeticExpr) dateTruncArg; + if (timestampArithmeticExpr.getTimeUnit() != TimestampArithmeticExpr.TimeUnit.HOUR) { + throw new AnalysisException("only HOUR unit is supported in date_add/date_sub for mtmv partition"); + } + if (!(timestampArithmeticExpr.getChild(1) instanceof LiteralExpr)) { + throw new AnalysisException("date_add/date_sub offset should be a literal"); + } + long offset = parseOffsetHours((LiteralExpr) timestampArithmeticExpr.getChild(1)); + String functionName = timestampArithmeticExpr.getFuncName().toLowerCase(); + if ("date_sub".equals(functionName)) { + this.offsetHours = -offset; + } else if ("date_add".equals(functionName)) { + this.offsetHours = offset; + } else { + throw new AnalysisException("only date_add/date_sub is supported in mtmv partition"); + } + } + + @Override + public String getRollUpIdentity(PartitionKeyDesc partitionKeyDesc, Map mvProperties) + throws AnalysisException { + String res = null; + Optional dateFormat = getDateFormat(mvProperties); + List> inValues = partitionKeyDesc.getInValues(); + for (int i = 0; i < inValues.size(); i++) { + PartitionValue partitionValue = inValues.get(i).get(0); + if (partitionValue.isNullPartition()) { + throw new AnalysisException("date_trunc + date_add/date_sub 
not support null partition value"); + } + String identity = dateTruncByOffset(partitionValue.getStringValue(), dateFormat, false).toString(); + if (i == 0) { + res = identity; + } else if (!Objects.equals(res, identity)) { + throw new AnalysisException( + String.format("partition values not equal, res: %s, identity: %s", res, identity)); + } + } + return res; + } + + @Override + public PartitionKeyDesc generateRollUpPartitionKeyDesc(PartitionKeyDesc partitionKeyDesc, + MTMVPartitionInfo mvPartitionInfo, MTMVRelatedTableIf pctTable) throws AnalysisException { + Type partitionColumnType = MTMVPartitionUtil + .getPartitionColumnType(pctTable, mvPartitionInfo.getPartitionColByPctTable(pctTable)); + Preconditions.checkState(partitionKeyDesc.getLowerValues().size() == 1, + "only support one partition column"); + DateTimeV2Literal beginTime = dateTruncByOffset( + partitionKeyDesc.getLowerValues().get(0).getStringValue(), Optional.empty(), false); + PartitionValue lowerValue = new PartitionValue(dateTimeToStr(beginTime, partitionColumnType)); + PartitionValue upperValue = getUpperValue(partitionKeyDesc.getUpperValues().get(0), beginTime, + partitionColumnType); + return PartitionKeyDesc.createFixed( + Collections.singletonList(lowerValue), + Collections.singletonList(upperValue)); + } + + @Override + public void analyze(MTMVPartitionInfo mvPartitionInfo) throws AnalysisException { + if (!TIME_UNITS.contains(this.timeUnit)) { + throw new AnalysisException( + String.format("timeUnit not support: %s, only support: %s", this.timeUnit, TIME_UNITS)); + } + List pctInfos = mvPartitionInfo.getPctInfos(); + for (BaseColInfo pctInfo : pctInfos) { + MTMVRelatedTableIf pctTable = MTMVUtil.getRelatedTable(pctInfo.getTableInfo()); + PartitionType partitionType = pctTable.getPartitionType(MvccUtil.getSnapshotFromContext(pctTable)); + if (partitionType == PartitionType.RANGE) { + Type partitionColumnType = MTMVPartitionUtil.getPartitionColumnType(pctTable, pctInfo.getColName()); + if 
(!partitionColumnType.isDateType()) { + throw new AnalysisException( + "partitionColumnType should be date/datetime " + + "when PartitionType is range and expr is date_trunc + date_add/date_sub"); + } + } else { + throw new AnalysisException("date_trunc + date_add/date_sub only support range partition"); + } + } + } + + @Override + public String toSql(MTMVPartitionInfo mvPartitionInfo) { + String funcName = offsetHours >= 0 ? "date_add" : "date_sub"; + return String.format("date_trunc(%s(`%s`, INTERVAL %s HOUR), '%s')", + funcName, mvPartitionInfo.getPartitionCol(), Math.abs(offsetHours), timeUnit); + } + + private PartitionValue getUpperValue(PartitionValue upperValue, DateTimeV2Literal beginTruncTime, + Type partitionColumnType) throws AnalysisException { + if (upperValue.isMax()) { + throw new AnalysisException("date_trunc + date_add/date_sub not support MAXVALUE partition"); + } + DateTimeV2Literal endTruncTime = dateTruncByOffset(upperValue.getStringValue(), Optional.empty(), true); + if (!Objects.equals(beginTruncTime, endTruncTime)) { + throw new AnalysisException( + String.format("partition values not equal, beginTruncTime: %s, endTruncTime: %s", + beginTruncTime, endTruncTime)); + } + DateTimeV2Literal endTime = dateIncrement(beginTruncTime); + return new PartitionValue(dateTimeToStr(endTime, partitionColumnType)); + } + + private DateTimeV2Literal dateTruncByOffset(String value, Optional dateFormat, boolean isUpper) + throws AnalysisException { + DateTimeV2Literal dateTimeLiteral = strToDate(value, dateFormat); + dateTimeLiteral = dateOffset(dateTimeLiteral); + if (isUpper) { + dateTimeLiteral = (DateTimeV2Literal) DateTimeArithmetic.secondsSub(dateTimeLiteral, new BigIntLiteral(1)); + } + Expression expression = DateTimeExtractAndTransform.dateTrunc(dateTimeLiteral, new VarcharLiteral(timeUnit)); + if (!(expression instanceof DateTimeV2Literal)) { + throw new AnalysisException("dateTrunc() should return DateLiteral, expression: " + expression); + } + 
return (DateTimeV2Literal) expression; + } + + private DateTimeV2Literal dateOffset(DateTimeV2Literal value) throws AnalysisException { + long offsetSeconds = offsetHours * 3600L; + Expression result = offsetSeconds >= 0 + ? DateTimeArithmetic.secondsAdd(value, new BigIntLiteral(offsetSeconds)) + : DateTimeArithmetic.secondsSub(value, new BigIntLiteral(-offsetSeconds)); + if (!(result instanceof DateTimeV2Literal)) { + throw new AnalysisException("date offset should return DateTimeV2Literal, result: " + result); + } + return (DateTimeV2Literal) result; + } + + private DateTimeV2Literal strToDate(String value, Optional dateFormat) throws AnalysisException { + try { + return new DateTimeV2Literal(value); + } catch (Exception e) { + if (!dateFormat.isPresent()) { + throw e; + } + Expression strToDate = DateTimeExtractAndTransform.strToDate(new VarcharLiteral(value), + new VarcharLiteral(dateFormat.get())); + if (strToDate instanceof DateV2Literal) { + DateV2Literal dateV2Literal = (DateV2Literal) strToDate; + return new DateTimeV2Literal(dateV2Literal.getYear(), dateV2Literal.getMonth(), dateV2Literal.getDay(), + 0, 0, 0); + } else if (strToDate instanceof DateTimeV2Literal) { + return (DateTimeV2Literal) strToDate; + } else { + throw new AnalysisException( + String.format("strToDate failed, stringValue: %s, dateFormat: %s", value, dateFormat)); + } + } + } + + private DateTimeV2Literal dateIncrement(DateTimeV2Literal value) throws AnalysisException { + Expression result; + switch (timeUnit) { + case "year": + result = value.plusYears(1L); + break; + case "quarter": + result = value.plusMonths(3L); + break; + case "month": + result = value.plusMonths(1L); + break; + case "week": + result = value.plusWeeks(1L); + break; + case "day": + result = value.plusDays(1L); + break; + case "hour": + result = value.plusHours(1L); + break; + default: + throw new AnalysisException( + "async materialized view partition roll up not support timeUnit: " + timeUnit); + } + if (!(result 
instanceof DateTimeV2Literal)) { + throw new AnalysisException("dateIncrement() should return DateTimeLiteral, result: " + result); + } + return (DateTimeV2Literal) result; + } + + private String dateTimeToStr(DateTimeV2Literal literal, Type partitionColumnType) throws AnalysisException { + if (partitionColumnType.isDate() || partitionColumnType.isDateV2()) { + return String.format(PartitionExprUtil.DATE_FORMATTER, literal.getYear(), literal.getMonth(), + literal.getDay()); + } else if (partitionColumnType.isDatetime() || partitionColumnType.isDatetimeV2() + || partitionColumnType.isTimeStampTz()) { + return String.format(PartitionExprUtil.DATETIME_FORMATTER, + literal.getYear(), literal.getMonth(), literal.getDay(), + literal.getHour(), literal.getMinute(), literal.getSecond()); + } else { + throw new AnalysisException( + "MTMV not support partition with column type : " + partitionColumnType); + } + } + + private Optional getDateFormat(Map mvProperties) { + return StringUtils.isEmpty(mvProperties.get(PropertyAnalyzer.PROPERTIES_PARTITION_DATE_FORMAT)) + ? 
Optional.empty() + : Optional.of(mvProperties.get(PropertyAnalyzer.PROPERTIES_PARTITION_DATE_FORMAT)); + } + + private long parseOffsetHours(LiteralExpr offsetExpr) throws AnalysisException { + try { + return Long.parseLong(offsetExpr.getStringValue()); + } catch (NumberFormatException e) { + throw new AnalysisException("date_add/date_sub hour offset should be integer"); + } + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + MTMVPartitionExprDateTruncDateAddSub that = (MTMVPartitionExprDateTruncDateAddSub) o; + return offsetHours == that.offsetHours && Objects.equals(timeUnit, that.timeUnit); + } + + @Override + public int hashCode() { + return Objects.hash(timeUnit, offsetHours); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprFactory.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprFactory.java index 1093399450ddef..284f86995982c0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprFactory.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionExprFactory.java @@ -19,8 +19,11 @@ import org.apache.doris.analysis.Expr; import org.apache.doris.analysis.FunctionCallExpr; +import org.apache.doris.analysis.TimestampArithmeticExpr; import org.apache.doris.common.AnalysisException; +import java.util.List; + /** * MTMV Partition Expr Factory */ @@ -32,6 +35,10 @@ public static MTMVPartitionExprService getExprService(Expr expr) throws Analysis FunctionCallExpr functionCallExpr = (FunctionCallExpr) expr; String fnName = functionCallExpr.getFnName().getFunction().toLowerCase(); if ("date_trunc".equals(fnName)) { + List paramsExprs = functionCallExpr.getParams().exprs(); + if (paramsExprs.size() == 2 && paramsExprs.get(0) instanceof TimestampArithmeticExpr) { + return new MTMVPartitionExprDateTruncDateAddSub(functionCallExpr); + } return new MTMVPartitionExprDateTrunc(functionCallExpr); } throw new 
AnalysisException("async materialized view partition not support function name: " + fnName); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java index 4b29f95aeef8ae..8af5e11dc4d6fa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java @@ -35,6 +35,8 @@ import org.apache.doris.nereids.trees.expressions.SlotReference; import org.apache.doris.nereids.trees.expressions.WindowExpression; import org.apache.doris.nereids.trees.expressions.functions.scalar.DateTrunc; +import org.apache.doris.nereids.trees.expressions.functions.scalar.HoursAdd; +import org.apache.doris.nereids.trees.expressions.functions.scalar.HoursSub; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.visitor.DefaultExpressionRewriter; import org.apache.doris.nereids.trees.plans.JoinType; @@ -91,7 +93,7 @@ public static class PartitionIncrementChecker extends DefaultPlanVisitor { public static final PartitionIncrementChecker INSTANCE = new PartitionIncrementChecker(); public static final Set> SUPPORT_EXPRESSION_TYPES = - ImmutableSet.of(DateTrunc.class, SlotReference.class, Literal.class); + ImmutableSet.of(DateTrunc.class, HoursAdd.class, HoursSub.class, SlotReference.class, Literal.class); @Override public Void visitLogicalProject(LogicalProject project, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java index 4f591c2261ed7a..ae76f76de927a5 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java @@ -24,6 +24,7 @@ import org.apache.doris.analysis.FunctionCallExpr; import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; +import org.apache.doris.analysis.TimestampArithmeticExpr; import org.apache.doris.catalog.Column; import org.apache.doris.catalog.PartitionType; import org.apache.doris.common.Pair; @@ -47,6 +48,8 @@ import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.functions.scalar.DateTrunc; +import org.apache.doris.nereids.trees.expressions.functions.scalar.HoursAdd; +import org.apache.doris.nereids.trees.expressions.functions.scalar.HoursSub; import org.apache.doris.nereids.trees.expressions.literal.Literal; import com.google.common.collect.Lists; @@ -180,6 +183,10 @@ private static List> convertDateTruncToLegacyArguments(List< private static Pair convertToLegacyRecursion(Expression expression) { if (expression instanceof Slot) { return Pair.of(1, new SlotRef(null, ((Slot) expression).getName())); + } else if (expression instanceof HoursAdd) { + return Pair.of(1, convertDateAddSubToLegacy((HoursAdd) expression, "date_add")); + } else if (expression instanceof HoursSub) { + return Pair.of(1, convertDateAddSubToLegacy((HoursSub) expression, "date_sub")); } else if (expression instanceof Literal) { return Pair.of(2, new StringLiteral(((Literal) expression).getStringValue())); } else if (expression instanceof Cast) { @@ -190,6 +197,18 @@ private static Pair convertToLegacyRecursion(Expression expressio } } + private static TimestampArithmeticExpr convertDateAddSubToLegacy(Expression expression, String funcName) { + Pair timeExprPair = convertToLegacyRecursion(expression.child(0)); + if 
(timeExprPair.key() != 1) { + throw new AnalysisException("unsupported date arithmetic argument " + expression.toString()); + } + if (!(expression.child(1) instanceof Literal)) { + throw new AnalysisException("date arithmetic offset should be literal " + expression.toString()); + } + Expr amountExpr = ((Literal) expression.child(1)).toLegacyLiteral(); + return new TimestampArithmeticExpr(funcName, timeExprPair.value(), amountExpr, "HOUR"); + } + public MTMVPartitionType getPartitionType() { return partitionType; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java b/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java index c2a7e31ff5c3ac..6482efebeb9584 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java @@ -18,10 +18,12 @@ package org.apache.doris.mtmv; import org.apache.doris.analysis.FunctionCallExpr; +import org.apache.doris.analysis.IntLiteral; import org.apache.doris.analysis.PartitionKeyDesc; import org.apache.doris.analysis.PartitionValue; import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; +import org.apache.doris.analysis.TimestampArithmeticExpr; import org.apache.doris.catalog.Type; import org.apache.doris.common.AnalysisException; import org.apache.doris.mtmv.MTMVPartitionInfo.MTMVPartitionType; @@ -141,6 +143,112 @@ public void testRollUpList() throws AnalysisException { Assert.assertEquals(Sets.newHashSet("name3"), res.get(expectDesc202002)); } + @Test + public void testRollUpRangeDateAddHour() throws AnalysisException { + FunctionCallExpr expr = new FunctionCallExpr("date_trunc", + Lists.newArrayList( + new TimestampArithmeticExpr("date_add", new SlotRef(null, null), new IntLiteral(3), "HOUR"), + new StringLiteral("day")), + true); + new Expectations() { + 
{ + mtmvPartitionUtil.getPartitionColumnType((MTMVRelatedTableIf) any, (String) any); + minTimes = 0; + result = Type.DATETIMEV2; + + mtmvPartitionInfo.getRelatedTable(); + minTimes = 0; + result = null; + + mtmvPartitionInfo.getExpr(); + minTimes = 0; + result = null; + + mtmvPartitionInfo.getPartitionType(); + minTimes = 0; + result = MTMVPartitionType.EXPR; + + mtmvPartitionInfo.getExpr(); + minTimes = 0; + result = expr; + } + }; + MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); + Map> relatedPartitionDescs = Maps.newHashMap(); + PartitionKeyDesc desc20250724 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-24 21:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00"))); + PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 21:00:00"))); + relatedPartitionDescs.put(desc20250724, Sets.newHashSet("name1")); + relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name2")); + Map> res = generator.rollUpRange(relatedPartitionDescs, mtmvPartitionInfo, null); + + PartitionKeyDesc expectDesc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); + PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); + Assert.assertEquals(2, res.size()); + Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); + Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + } + + @Test + public void testRollUpRangeDateSubHour() throws AnalysisException { + FunctionCallExpr expr = new FunctionCallExpr("date_trunc", + Lists.newArrayList( + new 
TimestampArithmeticExpr("date_sub", new SlotRef(null, null), new IntLiteral(3), "HOUR"), + new StringLiteral("day")), + true); + new Expectations() { + { + mtmvPartitionUtil.getPartitionColumnType((MTMVRelatedTableIf) any, (String) any); + minTimes = 0; + result = Type.DATETIMEV2; + + mtmvPartitionInfo.getRelatedTable(); + minTimes = 0; + result = null; + + mtmvPartitionInfo.getExpr(); + minTimes = 0; + result = null; + + mtmvPartitionInfo.getPartitionType(); + minTimes = 0; + result = MTMVPartitionType.EXPR; + + mtmvPartitionInfo.getExpr(); + minTimes = 0; + result = expr; + } + }; + MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); + Map> relatedPartitionDescs = Maps.newHashMap(); + PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 03:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00"))); + PartitionKeyDesc desc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 03:00:00"))); + relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name1")); + relatedPartitionDescs.put(desc20250726, Sets.newHashSet("name2")); + Map> res = generator.rollUpRange(relatedPartitionDescs, mtmvPartitionInfo, null); + + PartitionKeyDesc expectDesc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); + PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); + Assert.assertEquals(2, res.size()); + Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); + Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + } + private PartitionKeyDesc 
generateInDesc(String... values) { List> partitionValues = Lists.newArrayList(); for (String value : values) { diff --git a/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy b/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy new file mode 100644 index 00000000000000..e2fac8b49b31e8 --- /dev/null +++ b/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy @@ -0,0 +1,88 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_rollup_partition_mtmv_date_add") { + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add ( + id BIGINT NOT NULL, + k2 DATETIME NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_20250724 VALUES [("2025-07-24 21:00:00"),("2025-07-25 21:00:00")), + PARTITION p_20250725 VALUES [("2025-07-25 21:00:00"),("2025-07-26 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add VALUES + (1, "2025-07-24 21:01:23"), + (2, "2025-07-25 20:59:00"), + (3, "2025-07-25 21:10:00"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (date_trunc(day_alias, 'day')) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 3 HOUR), 'day') AS day_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add + GROUP BY day_alias; + """ + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add") + def showPartitionsResult = sql """show partitions from mv_test_rollup_partition_mtmv_date_add""" + assertEquals(2, showPartitionsResult.size()) + assertTrue(showPartitionsResult.toString().contains("2025-07-25 00:00:00")) + assertTrue(showPartitionsResult.toString().contains("2025-07-26 00:00:00")) + + def mvRows = sql """ + SELECT day_alias, cnt + FROM mv_test_rollup_partition_mtmv_date_add + ORDER BY day_alias + """ + assertEquals(2, mvRows.size()) + assertEquals("2025-07-25 00:00:00", mvRows[0][0].toString()) + assertEquals("2", mvRows[0][1].toString()) + assertEquals("2025-07-26 00:00:00", mvRows[1][0].toString()) + assertEquals("1", mvRows[1][1].toString()) + + sql """drop materialized view if exists 
mv_test_rollup_partition_mtmv_date_add""" + + test { + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (date_trunc(day_alias, 'day')) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 30 MINUTE), 'day') AS day_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add + GROUP BY day_alias; + """ + exception "unsupported argument" + } +} From 8e86e7d934cc85396b8e11d98c78a594d16f4259 Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Sun, 12 Apr 2026 23:07:59 +0300 Subject: [PATCH 2/8] [improvement](fe) Allow Cast in MTMV partition increment expression tree ### What problem does this PR solve? Issue Number: close #62395 Related PR: None Problem Summary: Partition increment check walks all sub-expressions; datetime plans often wrap slots in Cast under HoursAdd/HoursSub. Without Cast in the allowed set, MTMV partition analysis failed with invalid implicit expression. 
### Release note None ### Check List (For Author) - Test: No need to test (with reason) - Checkstyle verified: mvn checkstyle:check -pl fe-core - Behavior changed: No - Does this need documentation: No Made-with: Cursor --- .../rules/exploration/mv/PartitionIncrementMaintainer.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java index 8af5e11dc4d6fa..e9286d23b2cb12 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/PartitionIncrementMaintainer.java @@ -29,6 +29,7 @@ import org.apache.doris.nereids.rules.expression.ExpressionNormalization; import org.apache.doris.nereids.rules.expression.ExpressionRewriteContext; import org.apache.doris.nereids.trees.expressions.CTEId; +import org.apache.doris.nereids.trees.expressions.Cast; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; @@ -93,7 +94,8 @@ public static class PartitionIncrementChecker extends DefaultPlanVisitor { public static final PartitionIncrementChecker INSTANCE = new PartitionIncrementChecker(); public static final Set> SUPPORT_EXPRESSION_TYPES = - ImmutableSet.of(DateTrunc.class, HoursAdd.class, HoursSub.class, SlotReference.class, Literal.class); + ImmutableSet.of(Cast.class, DateTrunc.class, HoursAdd.class, HoursSub.class, SlotReference.class, + Literal.class); @Override public Void visitLogicalProject(LogicalProject project, From e61cad087fb245cefbcb44a66e3d32dc6270d6c5 Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Tue, 14 Apr 2026 17:02:47 +0300 Subject: [PATCH 3/8] [test](fe) Replace JMockit 
expectations with Mockito static stubs --- ...latedPartitionDescRollUpGeneratorTest.java | 150 ++++++++---------- 1 file changed, 62 insertions(+), 88 deletions(-) diff --git a/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java b/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java index 4e03df5cbcdb16..d0feb2edf44648 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/mtmv/MTMVRelatedPartitionDescRollUpGeneratorTest.java @@ -117,50 +117,37 @@ public void testRollUpRangeDateAddHour() throws AnalysisException { new TimestampArithmeticExpr("date_add", new SlotRef(null, null), new IntLiteral(3), "HOUR"), new StringLiteral("day")), true); - new Expectations() { - { - mtmvPartitionUtil.getPartitionColumnType((MTMVRelatedTableIf) any, (String) any); - minTimes = 0; - result = Type.DATETIMEV2; - - mtmvPartitionInfo.getRelatedTable(); - minTimes = 0; - result = null; - - mtmvPartitionInfo.getExpr(); - minTimes = 0; - result = null; - - mtmvPartitionInfo.getPartitionType(); - minTimes = 0; - result = MTMVPartitionType.EXPR; - - mtmvPartitionInfo.getExpr(); - minTimes = 0; - result = expr; - } - }; - MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); - Map> relatedPartitionDescs = Maps.newHashMap(); - PartitionKeyDesc desc20250724 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-24 21:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00"))); - PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-26 21:00:00"))); - relatedPartitionDescs.put(desc20250724, Sets.newHashSet("name1")); - relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name2")); - Map> res = 
generator.rollUpRange(relatedPartitionDescs, mtmvPartitionInfo, null); - - PartitionKeyDesc expectDesc20250725 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-25 00:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); - PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); - Assert.assertEquals(2, res.size()); - Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); - Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + try (MockedStatic mtmvPartitionUtilStatic = Mockito.mockStatic(MTMVPartitionUtil.class)) { + mtmvPartitionUtilStatic.when(() -> MTMVPartitionUtil.getPartitionColumnType( + Mockito.nullable(MTMVRelatedTableIf.class), Mockito.nullable(String.class))) + .thenReturn(Type.DATETIMEV2); + Mockito.when(mtmvPartitionInfo.getRelatedTable()).thenReturn(null); + Mockito.when(mtmvPartitionInfo.getExpr()).thenReturn(expr); + Mockito.when(mtmvPartitionInfo.getPartitionType()).thenReturn(MTMVPartitionType.EXPR); + + MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); + Map> relatedPartitionDescs = Maps.newHashMap(); + PartitionKeyDesc desc20250724 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-24 21:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00"))); + PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 21:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 21:00:00"))); + relatedPartitionDescs.put(desc20250724, Sets.newHashSet("name1")); + relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name2")); + Map> res = generator.rollUpRange(relatedPartitionDescs, + mtmvPartitionInfo, null); + + PartitionKeyDesc expectDesc20250725 = 
PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); + PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); + Assert.assertEquals(2, res.size()); + Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); + Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + } } @Test @@ -170,50 +157,37 @@ public void testRollUpRangeDateSubHour() throws AnalysisException { new TimestampArithmeticExpr("date_sub", new SlotRef(null, null), new IntLiteral(3), "HOUR"), new StringLiteral("day")), true); - new Expectations() { - { - mtmvPartitionUtil.getPartitionColumnType((MTMVRelatedTableIf) any, (String) any); - minTimes = 0; - result = Type.DATETIMEV2; - - mtmvPartitionInfo.getRelatedTable(); - minTimes = 0; - result = null; - - mtmvPartitionInfo.getExpr(); - minTimes = 0; - result = null; - - mtmvPartitionInfo.getPartitionType(); - minTimes = 0; - result = MTMVPartitionType.EXPR; - - mtmvPartitionInfo.getExpr(); - minTimes = 0; - result = expr; - } - }; - MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); - Map> relatedPartitionDescs = Maps.newHashMap(); - PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-25 03:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00"))); - PartitionKeyDesc desc20250726 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-27 03:00:00"))); - relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name1")); - relatedPartitionDescs.put(desc20250726, Sets.newHashSet("name2")); - Map> res = generator.rollUpRange(relatedPartitionDescs, 
mtmvPartitionInfo, null); - - PartitionKeyDesc expectDesc20250725 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-25 00:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); - PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( - Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), - Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); - Assert.assertEquals(2, res.size()); - Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); - Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + try (MockedStatic mtmvPartitionUtilStatic = Mockito.mockStatic(MTMVPartitionUtil.class)) { + mtmvPartitionUtilStatic.when(() -> MTMVPartitionUtil.getPartitionColumnType( + Mockito.nullable(MTMVRelatedTableIf.class), Mockito.nullable(String.class))) + .thenReturn(Type.DATETIMEV2); + Mockito.when(mtmvPartitionInfo.getRelatedTable()).thenReturn(null); + Mockito.when(mtmvPartitionInfo.getExpr()).thenReturn(expr); + Mockito.when(mtmvPartitionInfo.getPartitionType()).thenReturn(MTMVPartitionType.EXPR); + + MTMVRelatedPartitionDescRollUpGenerator generator = new MTMVRelatedPartitionDescRollUpGenerator(); + Map> relatedPartitionDescs = Maps.newHashMap(); + PartitionKeyDesc desc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-25 03:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00"))); + PartitionKeyDesc desc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 03:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 03:00:00"))); + relatedPartitionDescs.put(desc20250725, Sets.newHashSet("name1")); + relatedPartitionDescs.put(desc20250726, Sets.newHashSet("name2")); + Map> res = generator.rollUpRange(relatedPartitionDescs, + mtmvPartitionInfo, null); + + PartitionKeyDesc expectDesc20250725 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new 
PartitionValue("2025-07-25 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00"))); + PartitionKeyDesc expectDesc20250726 = PartitionKeyDesc.createFixed( + Lists.newArrayList(new PartitionValue("2025-07-26 00:00:00")), + Lists.newArrayList(new PartitionValue("2025-07-27 00:00:00"))); + Assert.assertEquals(2, res.size()); + Assert.assertEquals(Sets.newHashSet("name1"), res.get(expectDesc20250725)); + Assert.assertEquals(Sets.newHashSet("name2"), res.get(expectDesc20250726)); + } } private PartitionKeyDesc generateInDesc(String... values) { From 6f122f782cc3811676647b137dde2f8b17b80e4a Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Tue, 14 Apr 2026 20:17:37 +0300 Subject: [PATCH 4/8] [fix](fe) Fix MTMV refresh predicates for date_add hour partition ### What problem does this PR solve? Issue Number: #62395 Related PR: #62410 Problem Summary: MTMV partition refresh builds base-table predicates directly from MV partition bounds. When MV partition expression is date_trunc(date_add/sub(col, INTERVAL N HOUR), unit), the bounds must be shifted by the inverse hour offset; otherwise refresh can miss rows or fail with no-partition errors. ### Release note Supports MTMV partition refresh when partition expression includes date_trunc(date_add/sub(..., INTERVAL N HOUR), ...). 
### Check List (For Author) - Test: Regression test - ./run-regression-test.sh --run -d mtmv_p0 -s test_rollup_partition_mtmv_date_add (OrbStack Linux) - Behavior changed: Yes (MV refresh predicate mapping for hour-offset partition expressions) - Does this need documentation: No --- .../commands/UpdateMvByPartitionCommand.java | 101 ++++++- ...test_rollup_partition_mtmv_date_add.groovy | 286 +++++++++++++++++- 2 files changed, 377 insertions(+), 10 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java index 74f2bd1bfec007..588e5e4293e2cf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java @@ -17,6 +17,10 @@ package org.apache.doris.nereids.trees.plans.commands; +import org.apache.doris.analysis.Expr; +import org.apache.doris.analysis.FunctionCallExpr; +import org.apache.doris.analysis.LiteralExpr; +import org.apache.doris.analysis.TimestampArithmeticExpr; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ListPartitionItem; import org.apache.doris.catalog.MTMV; @@ -45,6 +49,10 @@ import org.apache.doris.nereids.trees.expressions.LessThan; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral; +import org.apache.doris.nereids.trees.expressions.literal.DateLiteral; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeLiteral; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeV2Literal; +import org.apache.doris.nereids.trees.expressions.literal.DateV2Literal; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; 
import org.apache.doris.nereids.trees.plans.Plan; @@ -136,9 +144,10 @@ private static Map> constructTableWithPredicates(MTMV m PartitionItem partitionItem = mv.getPartitionItemOrAnalysisException(partitionName); items.add(partitionItem); } + Optional inverseOffsetHours = getInverseHourOffsetForBaseFilter(mv); ImmutableMap.Builder> builder = new ImmutableMap.Builder<>(); tableWithPartKey.forEach((table, colName) -> - builder.put(table, constructPredicates(items, colName)) + builder.put(table, constructPredicates(items, colName, inverseOffsetHours)) ); return builder.build(); } @@ -153,23 +162,40 @@ public static Set constructPredicates(Set partitions, return constructPredicates(partitions, slot); } + @VisibleForTesting + public static Set constructPredicates(Set partitions, + String colName, Optional hourOffset) { + UnboundSlot slot = new UnboundSlot(colName); + return constructPredicates(partitions, slot, hourOffset); + } + /** * construct predicates for partition items, the min key is the min key of range items. * For list partition or less than partition items, the min key is null. */ @VisibleForTesting public static Set constructPredicates(Set partitions, Slot colSlot) { + return constructPredicates(partitions, colSlot, Optional.empty()); + } + + /** + * Construct predicates from partition items and apply optional hour shift to date-like bounds. 
+ */ + @VisibleForTesting + public static Set constructPredicates(Set partitions, Slot colSlot, + Optional hourOffset) { Set predicates = new HashSet<>(); if (partitions.isEmpty()) { return Sets.newHashSet(BooleanLiteral.TRUE); } + long shiftHours = hourOffset.orElse(0L); if (partitions.iterator().next() instanceof ListPartitionItem) { for (PartitionItem item : partitions) { - predicates.add(convertListPartitionToIn(item, colSlot)); + predicates.add(convertListPartitionToIn(item, colSlot, shiftHours)); } } else { for (PartitionItem item : partitions) { - predicates.add(convertRangePartitionToCompare(item, colSlot)); + predicates.add(convertRangePartitionToCompare(item, colSlot, shiftHours)); } } return predicates; @@ -180,9 +206,10 @@ private static Expression convertPartitionKeyToLiteral(PartitionKey key) { Type.fromPrimitiveType(key.getTypes().get(0))); } - private static Expression convertListPartitionToIn(PartitionItem item, Slot col) { + private static Expression convertListPartitionToIn(PartitionItem item, Slot col, long shiftHours) { List inValues = ((ListPartitionItem) item).getItems().stream() .map(UpdateMvByPartitionCommand::convertPartitionKeyToLiteral) + .map(literal -> shiftDateLikeLiteralByHours(literal, shiftHours)) .collect(ImmutableList.toImmutableList()); List predicates = new ArrayList<>(); if (inValues.stream().anyMatch(NullLiteral.class::isInstance)) { @@ -201,16 +228,18 @@ private static Expression convertListPartitionToIn(PartitionItem item, Slot col) return ExpressionUtils.or(predicates); } - private static Expression convertRangePartitionToCompare(PartitionItem item, Slot col) { + private static Expression convertRangePartitionToCompare(PartitionItem item, Slot col, long shiftHours) { Range range = item.getItems(); List expressions = new ArrayList<>(); if (range.hasLowerBound() && !range.lowerEndpoint().isMinValue()) { PartitionKey key = range.lowerEndpoint(); - expressions.add(new GreaterThanEqual(col, convertPartitionKeyToLiteral(key))); 
+ expressions.add(new GreaterThanEqual(col, + shiftDateLikeLiteralByHours(convertPartitionKeyToLiteral(key), shiftHours))); } if (range.hasUpperBound() && !range.upperEndpoint().isMaxValue()) { PartitionKey key = range.upperEndpoint(); - expressions.add(new LessThan(col, convertPartitionKeyToLiteral(key))); + expressions.add(new LessThan(col, + shiftDateLikeLiteralByHours(convertPartitionKeyToLiteral(key), shiftHours))); } if (expressions.isEmpty()) { return BooleanLiteral.of(true); @@ -224,6 +253,64 @@ private static Expression convertRangePartitionToCompare(PartitionItem item, Slo return predicate; } + /** + * For partition expression like date_trunc(date_add/sub(base_col, INTERVAL N HOUR), unit), + * the base-table filter should use inverse offset (target - N / + N). + */ + private static Optional getInverseHourOffsetForBaseFilter(MTMV mv) { + Expr partitionExpr = mv.getMvPartitionInfo().getExpr(); + if (!(partitionExpr instanceof FunctionCallExpr)) { + return Optional.empty(); + } + FunctionCallExpr functionCallExpr = (FunctionCallExpr) partitionExpr; + if (!"date_trunc".equalsIgnoreCase(functionCallExpr.getFnName().getFunction())) { + return Optional.empty(); + } + List params = functionCallExpr.getParams().exprs(); + if (params.size() != 2 || !(params.get(0) instanceof TimestampArithmeticExpr)) { + return Optional.empty(); + } + TimestampArithmeticExpr timestampArithmeticExpr = (TimestampArithmeticExpr) params.get(0); + if (timestampArithmeticExpr.getTimeUnit() != TimestampArithmeticExpr.TimeUnit.HOUR + || !(timestampArithmeticExpr.getChild(1) instanceof LiteralExpr)) { + return Optional.empty(); + } + long offsetHours; + try { + offsetHours = Long.parseLong(((LiteralExpr) timestampArithmeticExpr.getChild(1)).getStringValue()); + } catch (NumberFormatException e) { + return Optional.empty(); + } + String funcName = timestampArithmeticExpr.getFuncName().toLowerCase(); + if ("date_sub".equals(funcName)) { + offsetHours = -offsetHours; + } else if 
(!"date_add".equals(funcName)) { + return Optional.empty(); + } + return Optional.of(-offsetHours); + } + + private static Expression shiftDateLikeLiteralByHours(Expression expression, long shiftHours) { + if (shiftHours == 0) { + return expression; + } + if (expression instanceof DateTimeV2Literal) { + return ((DateTimeV2Literal) expression).plusHours(shiftHours); + } + if (expression instanceof DateTimeLiteral) { + return ((DateTimeLiteral) expression).plusHours(shiftHours); + } + if (expression instanceof DateV2Literal) { + return ((DateV2Literal) expression).toBeginOfTheDay().plusHours(shiftHours); + } + if (expression instanceof DateLiteral) { + DateLiteral dateLiteral = (DateLiteral) expression; + return new DateTimeLiteral(dateLiteral.getYear(), dateLiteral.getMonth(), dateLiteral.getDay(), + 0, 0, 0).plusHours(shiftHours); + } + return expression; + } + /** * Add predicates on base table when mv can partition update, Also support plan that contain cte and view */ diff --git a/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy b/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy index e2fac8b49b31e8..b5c00efa97d597 100644 --- a/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy +++ b/regression-test/suites/mtmv_p0/test_rollup_partition_mtmv_date_add.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-suite("test_rollup_partition_mtmv_date_add") { +suite("test_rollup_partition_mtmv_date_add", "mtmv") { sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add""" sql """drop table if exists t_test_rollup_partition_mtmv_date_add""" @@ -59,7 +59,7 @@ suite("test_rollup_partition_mtmv_date_add") { assertTrue(showPartitionsResult.toString().contains("2025-07-26 00:00:00")) def mvRows = sql """ - SELECT day_alias, cnt + SELECT date_format(day_alias, '%Y-%m-%d %H:%i:%s') AS day_alias, cnt FROM mv_test_rollup_partition_mtmv_date_add ORDER BY day_alias """ @@ -83,6 +83,286 @@ suite("test_rollup_partition_mtmv_date_add") { FROM t_test_rollup_partition_mtmv_date_add GROUP BY day_alias; """ - exception "unsupported argument" + exception "invalid expression is minutes_add" } + + // DATETIMEV2(6) + 'T' separator + fractional seconds + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add_datetimev2_formats""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add_datetimev2_formats""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add_datetimev2_formats ( + id BIGINT NOT NULL, + k2 DATETIMEV2(6) NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_20250724 VALUES [("2025-07-24 21:00:00"),("2025-07-25 21:00:00")), + PARTITION p_20250725 VALUES [("2025-07-25 21:00:00"),("2025-07-26 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add_datetimev2_formats VALUES + (1, "2025-07-24T21:01:23.123456"), + (2, "2025-07-25 20:59:00.999999"), + (3, "2025-07-25T21:10:00.000001"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add_datetimev2_formats + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (date_trunc(day_alias, 'day')) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, 
INTERVAL 3 HOUR), 'day') AS day_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add_datetimev2_formats + GROUP BY day_alias; + """ + + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add_datetimev2_formats") + + def v2ShowPartitionsResult = sql """show partitions from mv_test_rollup_partition_mtmv_date_add_datetimev2_formats""" + assertEquals(2, v2ShowPartitionsResult.size()) + assertTrue(v2ShowPartitionsResult.toString().contains("2025-07-25 00:00:00")) + assertTrue(v2ShowPartitionsResult.toString().contains("2025-07-26 00:00:00")) + + def v2MvRows = sql """ + SELECT date_format(day_alias, '%Y-%m-%d %H:%i:%s') AS day_alias, cnt + FROM mv_test_rollup_partition_mtmv_date_add_datetimev2_formats + ORDER BY day_alias + """ + assertEquals(2, v2MvRows.size()) + assertEquals("2025-07-25 00:00:00", v2MvRows[0][0].toString()) + assertEquals("2", v2MvRows[0][1].toString()) + assertEquals("2025-07-26 00:00:00", v2MvRows[1][0].toString()) + assertEquals("1", v2MvRows[1][1].toString()) + + // WEEK (week starts at Monday 00:00:00, so base partition boundary should be Sunday 21:00:00 for +3h shift) + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add_week""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add_week""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add_week ( + id BIGINT NOT NULL, + k2 DATETIMEV2(6) NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_20250707 VALUES [("2025-07-06 21:00:00"),("2025-07-13 21:00:00")), + PARTITION p_20250714 VALUES [("2025-07-13 21:00:00"),("2025-07-20 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add_week VALUES + (1, "2025-07-06 21:01:23.000000"), + (2, "2025-07-13 20:59:00.000000"), + (3, "2025-07-13 21:10:00.000000"); + """ + + sql """ + CREATE MATERIALIZED VIEW 
mv_test_rollup_partition_mtmv_date_add_week + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (week_alias) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 3 HOUR), 'week') AS week_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add_week + GROUP BY week_alias; + """ + + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add_week") + + def weekPartitions = sql """show partitions from mv_test_rollup_partition_mtmv_date_add_week""" + assertEquals(2, weekPartitions.size()) + assertTrue(weekPartitions.toString().contains("2025-07-07 00:00:00")) + assertTrue(weekPartitions.toString().contains("2025-07-14 00:00:00")) + + def weekRows = sql """ + SELECT date_format(week_alias, '%Y-%m-%d %H:%i:%s') AS k, cnt + FROM mv_test_rollup_partition_mtmv_date_add_week + ORDER BY k + """ + assertEquals(2, weekRows.size()) + assertEquals("2025-07-07 00:00:00", weekRows[0][0].toString()) + assertEquals("2", weekRows[0][1].toString()) + assertEquals("2025-07-14 00:00:00", weekRows[1][0].toString()) + assertEquals("1", weekRows[1][1].toString()) + + // MONTH (base boundary should be (month_start - 3h) for +3h shift) + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add_month""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add_month""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add_month ( + id BIGINT NOT NULL, + k2 DATETIMEV2(6) NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_202508 VALUES [("2025-07-31 21:00:00"),("2025-08-31 21:00:00")), + PARTITION p_202509 VALUES [("2025-08-31 21:00:00"),("2025-09-30 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add_month VALUES + (1, "2025-07-31 21:01:23.000000"), + (2, "2025-08-31 20:59:00.000000"), + (3, "2025-08-31 
21:10:00.000000"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add_month + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (month_alias) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 3 HOUR), 'month') AS month_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add_month + GROUP BY month_alias; + """ + + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add_month") + + def monthPartitions = sql """show partitions from mv_test_rollup_partition_mtmv_date_add_month""" + assertEquals(2, monthPartitions.size()) + assertTrue(monthPartitions.toString().contains("2025-08-01 00:00:00")) + assertTrue(monthPartitions.toString().contains("2025-09-01 00:00:00")) + + def monthRows = sql """ + SELECT date_format(month_alias, '%Y-%m-%d %H:%i:%s') AS k, cnt + FROM mv_test_rollup_partition_mtmv_date_add_month + ORDER BY k + """ + assertEquals(2, monthRows.size()) + assertEquals("2025-08-01 00:00:00", monthRows[0][0].toString()) + assertEquals("2", monthRows[0][1].toString()) + assertEquals("2025-09-01 00:00:00", monthRows[1][0].toString()) + assertEquals("1", monthRows[1][1].toString()) + + // QUARTER (base boundary should be (quarter_start - 3h) for +3h shift) + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add_quarter""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add_quarter""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add_quarter ( + id BIGINT NOT NULL, + k2 DATETIMEV2(6) NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_2025q3 VALUES [("2025-06-30 21:00:00"),("2025-09-30 21:00:00")), + PARTITION p_2025q4 VALUES [("2025-09-30 21:00:00"),("2025-12-31 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add_quarter VALUES + (1, 
"2025-06-30 21:01:23.000000"), + (2, "2025-09-30 20:59:00.000000"), + (3, "2025-09-30 21:10:00.000000"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add_quarter + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (quarter_alias) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 3 HOUR), 'quarter') AS quarter_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add_quarter + GROUP BY quarter_alias; + """ + + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add_quarter") + + def quarterPartitions = sql """show partitions from mv_test_rollup_partition_mtmv_date_add_quarter""" + assertEquals(2, quarterPartitions.size()) + assertTrue(quarterPartitions.toString().contains("2025-07-01 00:00:00")) + assertTrue(quarterPartitions.toString().contains("2025-10-01 00:00:00")) + + def quarterRows = sql """ + SELECT date_format(quarter_alias, '%Y-%m-%d %H:%i:%s') AS k, cnt + FROM mv_test_rollup_partition_mtmv_date_add_quarter + ORDER BY k + """ + assertEquals(2, quarterRows.size()) + assertEquals("2025-07-01 00:00:00", quarterRows[0][0].toString()) + assertEquals("2", quarterRows[0][1].toString()) + assertEquals("2025-10-01 00:00:00", quarterRows[1][0].toString()) + assertEquals("1", quarterRows[1][1].toString()) + + // YEAR (base boundary should be (year_start - 3h) for +3h shift) + sql """drop materialized view if exists mv_test_rollup_partition_mtmv_date_add_year""" + sql """drop table if exists t_test_rollup_partition_mtmv_date_add_year""" + + sql """ + CREATE TABLE t_test_rollup_partition_mtmv_date_add_year ( + id BIGINT NOT NULL, + k2 DATETIMEV2(6) NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_2025 VALUES [("2024-12-31 21:00:00"),("2025-12-31 21:00:00")), + PARTITION p_2026 VALUES [("2025-12-31 21:00:00"),("2026-12-31 21:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES 
('replication_num' = '1'); + """ + + sql """ + INSERT INTO t_test_rollup_partition_mtmv_date_add_year VALUES + (1, "2024-12-31 21:01:23.000000"), + (2, "2025-12-31 20:59:00.000000"), + (3, "2025-12-31 21:10:00.000000"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_rollup_partition_mtmv_date_add_year + BUILD IMMEDIATE REFRESH AUTO ON MANUAL + partition by (year_alias) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT date_trunc(date_add(k2, INTERVAL 3 HOUR), 'year') AS year_alias, count(*) AS cnt + FROM t_test_rollup_partition_mtmv_date_add_year + GROUP BY year_alias; + """ + + waitingMTMVTaskFinishedByMvName("mv_test_rollup_partition_mtmv_date_add_year") + + def yearPartitions = sql """show partitions from mv_test_rollup_partition_mtmv_date_add_year""" + assertEquals(2, yearPartitions.size()) + assertTrue(yearPartitions.toString().contains("2025-01-01 00:00:00")) + assertTrue(yearPartitions.toString().contains("2026-01-01 00:00:00")) + + def yearRows = sql """ + SELECT date_format(year_alias, '%Y-%m-%d %H:%i:%s') AS k, cnt + FROM mv_test_rollup_partition_mtmv_date_add_year + ORDER BY k + """ + assertEquals(2, yearRows.size()) + assertEquals("2025-01-01 00:00:00", yearRows[0][0].toString()) + assertEquals("2", yearRows[0][1].toString()) + assertEquals("2026-01-01 00:00:00", yearRows[1][0].toString()) + assertEquals("1", yearRows[1][1].toString()) } From c0366bdfd648d9c2a7f7b397f101380f967dcbad Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Tue, 14 Apr 2026 23:02:47 +0300 Subject: [PATCH 5/8] [fix](fe) Apply hour-offset to union-compensation predicates ### What problem does this PR solve? Issue Number: N/A Related PR: #62410 Problem Summary: MTMV partitioned by `date_trunc(date_add/sub(base_col, INTERVAL N HOUR), ...)` needs inverse-hour shifting when translating partition names back into base-table predicates. 
The refresh path applied the shift, but the optimizer union-compensation path reconstructed predicates without the offset, which could miss rows or produce duplicates. ### Release note Fix MTMV union-compensation predicate bounds for hour-offset date_add/date_sub partition expressions. ### Check List (For Author) - Test: No need to test (local environment lacks JDK 17) - Behavior changed: Yes (query rewrite/union-compensation correctness) - Does this need documentation: No --- .../mv/AbstractMaterializedViewRule.java | 6 +- .../rules/exploration/mv/StructInfo.java | 5 +- .../commands/UpdateMvByPartitionCommand.java | 16 ++- ...pensation_mtmv_date_add_hour_offset.groovy | 123 ++++++++++++++++++ 4 files changed, 145 insertions(+), 5 deletions(-) create mode 100644 regression-test/suites/mtmv_p0/test_union_compensation_mtmv_date_add_hour_offset.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AbstractMaterializedViewRule.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AbstractMaterializedViewRule.java index 9f76c242dcf0f5..5ed9df895a72cd 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AbstractMaterializedViewRule.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AbstractMaterializedViewRule.java @@ -57,6 +57,7 @@ import org.apache.doris.nereids.trees.plans.TableId; import org.apache.doris.nereids.trees.plans.algebra.CatalogRelation; import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; +import org.apache.doris.nereids.trees.plans.commands.UpdateMvByPartitionCommand; import org.apache.doris.nereids.trees.plans.logical.LogicalFilter; import org.apache.doris.nereids.trees.plans.logical.LogicalOlapScan; import org.apache.doris.nereids.trees.plans.logical.LogicalTopN; @@ -382,8 +383,11 @@ protected List doRewrite(StructInfo queryStructInfo, CascadesContext casca return rewriteResults; } if (partitionNeedUnion) { + 
Optional inverseOffsetHours = + UpdateMvByPartitionCommand.getInverseHourOffsetForBaseFilter(mtmv); Pair planAndNeedAddFilterPair = - StructInfo.addFilterOnTableScan(queryPlan, invalidPartitions.value(), cascadesContext); + StructInfo.addFilterOnTableScan(queryPlan, invalidPartitions.value(), cascadesContext, + inverseOffsetHours); if (planAndNeedAddFilterPair == null) { materializationContext.recordFailReason(queryStructInfo, "Add filter to base table fail when union rewrite", diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/StructInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/StructInfo.java index a45863aa32c3c7..a045d36919cccf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/StructInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/StructInfo.java @@ -963,9 +963,10 @@ public Plan visitLogicalOlapScan(LogicalOlapScan olapScan, * return null if add filter fail. 
*/ public static Pair<Plan, Boolean> addFilterOnTableScan(Plan queryPlan, - Map<BaseTableInfo, Set<String>> partitionOnBaseTableMap, CascadesContext parentCascadesContext) { + Map<BaseTableInfo, Set<String>> partitionOnBaseTableMap, CascadesContext parentCascadesContext, + Optional<Long> hourOffset) { // Firstly, construct filter form invalid partition, this filter should be added on origin plan - PredicateAddContext predicateAddContext = new PredicateAddContext(null, partitionOnBaseTableMap); + PredicateAddContext predicateAddContext = new PredicateAddContext(null, partitionOnBaseTableMap, hourOffset); Plan queryPlanWithUnionFilter = queryPlan.accept(new PredicateAdder(), predicateAddContext); if (!predicateAddContext.isHandleSuccess()) { return null; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java index 588e5e4293e2cf..f01e974f2b13a0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/UpdateMvByPartitionCommand.java @@ -257,7 +257,7 @@ private static Expression convertRangePartitionToCompare(PartitionItem item, Slo * For partition expression like date_trunc(date_add/sub(base_col, INTERVAL N HOUR), unit), * the base-table filter should use inverse offset (target - N / + N).
*/ - private static Optional getInverseHourOffsetForBaseFilter(MTMV mv) { + public static Optional getInverseHourOffsetForBaseFilter(MTMV mv) { Expr partitionExpr = mv.getMvPartitionInfo().getExpr(); if (!(partitionExpr instanceof FunctionCallExpr)) { return Optional.empty(); @@ -438,7 +438,8 @@ public Plan visitLogicalCatalogRelation(LogicalCatalogRelation catalogRelation, if (!partitionHasDataItems.isEmpty()) { return new LogicalFilter<>( ExpressionUtils.extractConjunctionToSet( - ExpressionUtils.or(constructPredicates(partitionHasDataItems, partitionSlot)) + ExpressionUtils.or(constructPredicates(partitionHasDataItems, partitionSlot, + predicates.getHourOffset())) ), catalogRelation ); @@ -457,14 +458,21 @@ public static class PredicateAddContext { private final Map> predicates; private final Map> partitions; + private final Optional hourOffset; private boolean handleSuccess = true; // when add filter by partition, if partition has no data, doesn't need to add filter. should be false private boolean needAddFilter = true; public PredicateAddContext(Map> predicates, Map> partitions) { + this(predicates, partitions, Optional.empty()); + } + + public PredicateAddContext(Map> predicates, + Map> partitions, Optional hourOffset) { this.predicates = predicates; this.partitions = partitions; + this.hourOffset = hourOffset == null ? 
Optional.empty() : hourOffset; } public Map> getPredicates() { @@ -475,6 +483,10 @@ public Map> getPartitions() { return partitions; } + public Optional getHourOffset() { + return hourOffset; + } + public boolean isEmpty() { return predicates == null && partitions == null; } diff --git a/regression-test/suites/mtmv_p0/test_union_compensation_mtmv_date_add_hour_offset.groovy b/regression-test/suites/mtmv_p0/test_union_compensation_mtmv_date_add_hour_offset.groovy new file mode 100644 index 00000000000000..a9f585789ab6f3 --- /dev/null +++ b/regression-test/suites/mtmv_p0/test_union_compensation_mtmv_date_add_hour_offset.groovy @@ -0,0 +1,123 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_union_compensation_mtmv_date_add_hour_offset", "mtmv") { + sql "SET enable_nereids_planner=true" + sql "SET enable_fallback_to_original_planner=false" + sql "SET enable_materialized_view_rewrite=true" + sql "SET enable_materialized_view_nest_rewrite=true" + sql "SET enable_materialized_view_union_rewrite=true" + sql "SET enable_nereids_timeout=false" + + sql """drop materialized view if exists mv_test_union_compensation_mtmv_date_add_hour_offset""" + sql """drop table if exists t_test_union_compensation_mtmv_date_add_hour_offset""" + + sql """ + CREATE TABLE t_test_union_compensation_mtmv_date_add_hour_offset ( + id BIGINT NOT NULL, + k2 DATETIME NOT NULL + ) ENGINE=OLAP + DUPLICATE KEY(id) + PARTITION BY range(k2) + ( + PARTITION p_20250724 VALUES [("2025-07-24 00:00:00"),("2025-07-25 00:00:00")), + PARTITION p_20250725 VALUES [("2025-07-25 00:00:00"),("2025-07-26 00:00:00")), + PARTITION p_20250726 VALUES [("2025-07-26 00:00:00"),("2025-07-27 00:00:00")) + ) + DISTRIBUTED BY HASH(id) BUCKETS 1 + PROPERTIES ('replication_num' = '1'); + """ + + // Two mv partitions: 2025-07-25 and 2025-07-26 (with +3 hour shift). 
+ sql """ + INSERT INTO t_test_union_compensation_mtmv_date_add_hour_offset VALUES + (1, "2025-07-24 22:00:00"), + (2, "2025-07-25 20:00:00"), + (3, "2025-07-25 22:00:00"), + (4, "2025-07-26 10:00:00"); + """ + + sql """ + CREATE MATERIALIZED VIEW mv_test_union_compensation_mtmv_date_add_hour_offset + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by (date_trunc(day_alias, 'day')) + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ('replication_num' = '1') + AS + SELECT id, date_trunc(date_add(k2, INTERVAL 3 HOUR), 'day') AS day_alias + FROM t_test_union_compensation_mtmv_date_add_hour_offset; + """ + + def showPartitionsResult = sql """show partitions from mv_test_union_compensation_mtmv_date_add_hour_offset""" + logger.info("showPartitionsResult: " + showPartitionsResult.toString()) + + String partitionToRefresh = null + for (def row : showPartitionsResult) { + boolean containsLower = row.any { it != null && it.toString().contains("2025-07-26 00:00:00") } + boolean containsUpper = row.any { it != null && it.toString().contains("2025-07-27 00:00:00") } + if (!(containsLower && containsUpper)) { + continue + } + for (def cell : row) { + if (cell != null && cell.toString().startsWith("p_")) { + partitionToRefresh = cell.toString() + break + } + } + if (partitionToRefresh != null) { + break + } + } + assertTrue(partitionToRefresh != null) + + sql """ + REFRESH MATERIALIZED VIEW mv_test_union_compensation_mtmv_date_add_hour_offset partitions(${partitionToRefresh}); + """ + waitingMTMVTaskFinishedByMvName("mv_test_union_compensation_mtmv_date_add_hour_offset") + + def mvRows = sql """ + SELECT date_format(day_alias, '%Y-%m-%d %H:%i:%s') AS k, count(*) AS cnt + FROM mv_test_union_compensation_mtmv_date_add_hour_offset + GROUP BY k + ORDER BY k; + """ + assertEquals(1, mvRows.size()) + assertEquals("2025-07-26 00:00:00", mvRows[0][0].toString()) + assertEquals("2", mvRows[0][1].toString()) + + def querySql = """ + SELECT id, date_trunc(date_add(k2, INTERVAL 3 HOUR), 
'day') AS day_alias + FROM t_test_union_compensation_mtmv_date_add_hour_offset + ORDER BY id + """ + mv_rewrite_success_without_check_chosen(querySql, "mv_test_union_compensation_mtmv_date_add_hour_offset") + + def explainResult = sql """ explain ${querySql} """ + logger.info("explainResult: " + explainResult.toString()) + assertTrue(explainResult.toString().contains("VUNION")) + + def queryRows = sql """ ${querySql} """ + assertEquals(4, queryRows.size()) + assertEquals("1", queryRows[0][0].toString()) + assertEquals("2025-07-25 00:00:00", queryRows[0][1].toString()) + assertEquals("2", queryRows[1][0].toString()) + assertEquals("2025-07-25 00:00:00", queryRows[1][1].toString()) + assertEquals("3", queryRows[2][0].toString()) + assertEquals("2025-07-26 00:00:00", queryRows[2][1].toString()) + assertEquals("4", queryRows[3][0].toString()) + assertEquals("2025-07-26 00:00:00", queryRows[3][1].toString()) +} From 0f29f35084075dcfe80a9e6b3994fbac02500622 Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Wed, 15 Apr 2026 21:12:05 +0300 Subject: [PATCH 6/8] [test](regression) Avoid selecting raw bitmap column ### What problem does this PR solve? Issue Number: Related PR: Problem Summary: Regression case selected raw BITMAP/VARBINARY column and compared unstable/driver-dependent bytes; compare only bitmap_to_string output instead. 
### Release note None ### Check List (For Author) - Test: No need to test (CI regression will cover) - Behavior changed: No - Does this need documentation: No --- .../test_aggregate_all_functions.out | 11 +++++------ .../test_aggregate_all_functions.groovy | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/regression-test/data/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.out b/regression-test/data/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.out index f3aedc75042f13..79551403ff7d4c 100644 --- a/regression-test/data/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.out +++ b/regression-test/data/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.out @@ -12,13 +12,13 @@ beijing 100 xian 100 -- !select_all1 -- -20220201 eleme \N 10001,72173389 -20220201 meituan \N 10000,10001,72173389 -20220202 eleme \N 10001,72173389 -20220203 meituan \N 10000,10001,72173389 +20220201 eleme 10001,72173389 +20220201 meituan 10000,10001,72173389 +20220202 eleme 10001,72173389 +20220203 meituan 10000,10001,72173389 -- !select_all2 -- -20220202 eleme \N 10001,72173389 +20220202 eleme 10001,72173389 -- !bitmap_intersect -- 20220201 10001 @@ -326,4 +326,3 @@ beijing chengdu shanghai 20220201 0 1 20220201 1 3 20220202 2 4999 - diff --git a/regression-test/suites/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.groovy b/regression-test/suites/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.groovy index a4ba5e05e4cb28..e521065892153c 100644 --- a/regression-test/suites/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.groovy +++ b/regression-test/suites/query_p0/sql_functions/aggregate_functions/test_aggregate_all_functions.groovy @@ -102,8 +102,8 @@ suite("test_aggregate_all_functions", "arrow_flight_sql") { sql "insert into ${tableName_03} select dt,page,to_bitmap(user_id_int) user_id from ${tableName_04}" 
sql "insert into ${tableName_03} select dt,page,bitmap_hash(user_id_str) user_id from ${tableName_04}" - qt_select_all1 "select *, bitmap_to_string(user_id) from pv_bitmap order by 1,2;" - qt_select_all2 "select *, bitmap_to_string(user_id) from pv_bitmap where dt = 20220202 order by 1,2;" + qt_select_all1 "select dt, page, bitmap_to_string(user_id) from pv_bitmap order by 1,2;" + qt_select_all2 "select dt, page, bitmap_to_string(user_id) from pv_bitmap where dt = 20220202 order by 1,2;" qt_bitmap_intersect "select dt, bitmap_to_string(bitmap_intersect(user_id_bitmap)) from ${tableName_04} group by dt order by dt" From fe56305ca4fe771a6a1613fa72ba038c6491929b Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Wed, 15 Apr 2026 21:27:57 +0300 Subject: [PATCH 7/8] [fix](build) Fix license check diff in shallow clones ### What problem does this PR solve? Issue Number: Related PR: #62410 Problem Summary: License Check falls back to full-repo scan when git diff fails under shallow fetch, causing unrelated pre-existing files to fail the PR. Fetch full history for PR head and base to reliably generate incremental config. 
### Release note None ### Check List (For Author) - Test: No need to test (GitHub Actions) - Behavior changed: No - Does this need documentation: No --- .github/workflows/license-eyes.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/license-eyes.yml b/.github/workflows/license-eyes.yml index ca77bda309c681..bbb9752fdec64e 100644 --- a/.github/workflows/license-eyes.yml +++ b/.github/workflows/license-eyes.yml @@ -41,16 +41,19 @@ jobs: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" if: ${{ github.event_name != 'pull_request_target' }} uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Checkout ${{ github.ref }} ( ${{ github.event.pull_request.head.sha }} ) if: ${{ github.event_name == 'pull_request_target' }} uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} + fetch-depth: 0 - name: Fetch base branch for diff if: github.event_name == 'pull_request_target' - run: git fetch --no-tags --depth=1 origin ${{ github.base_ref }} + run: git fetch --no-tags origin ${{ github.base_ref }} - name: Get changed files if: github.event_name == 'pull_request_target' From 4da3f7ca1c6b07ceedbffd217c1d25d2f6e5908d Mon Sep 17 00:00:00 2001 From: Hakan Uzum Date: Wed, 15 Apr 2026 21:35:28 +0300 Subject: [PATCH 8/8] Revert "[fix](build) Fix license check diff in shallow clones" This reverts commit fe56305ca4fe771a6a1613fa72ba038c6491929b. 
--- .github/workflows/license-eyes.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/license-eyes.yml b/.github/workflows/license-eyes.yml index bbb9752fdec64e..ca77bda309c681 100644 --- a/.github/workflows/license-eyes.yml +++ b/.github/workflows/license-eyes.yml @@ -41,19 +41,16 @@ jobs: - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" if: ${{ github.event_name != 'pull_request_target' }} uses: actions/checkout@v3 - with: - fetch-depth: 0 - name: Checkout ${{ github.ref }} ( ${{ github.event.pull_request.head.sha }} ) if: ${{ github.event_name == 'pull_request_target' }} uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 0 - name: Fetch base branch for diff if: github.event_name == 'pull_request_target' - run: git fetch --no-tags origin ${{ github.base_ref }} + run: git fetch --no-tags --depth=1 origin ${{ github.base_ref }} - name: Get changed files if: github.event_name == 'pull_request_target'