Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions src/Interpreters/ActionsDAG.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -253,6 +253,15 @@ ActionsDAG::Node & ActionsDAG::addNode(Node node)
{
auto & res = nodes.emplace_back(std::move(node));

// This should only be a temporary fix to avoid regression in 25.10
// https://github.com/ClickHouse/ClickHouse/issues/90363#issue-3642139014
if (res.type != ActionType::PLACEHOLDER)
{
const auto valid_column = !res.column || (res.column->isConst() || typeid_cast<const ColumnSet *>(res.column.get()) != nullptr);
if (!valid_column)
res.column = nullptr;
}

if (res.type == ActionType::INPUT)
inputs.emplace_back(&res);

Expand Down
6 changes: 4 additions & 2 deletions src/Processors/QueryPlan/CubeStep.cpp
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
#include <Processors/QueryPlan/CubeStep.h>

#include <Columns/ColumnConst.h>
#include <DataTypes/DataTypesNumber.h>
#include <Functions/FunctionFactory.h>
#include <Interpreters/ExpressionActions.h>
#include <Processors/QueryPlan/AggregatingStep.h>
#include <Processors/QueryPlan/CubeStep.h>
#include <Processors/Transforms/CubeTransform.h>
#include <Processors/Transforms/ExpressionTransform.h>
#include <QueryPipeline/QueryPipelineBuilder.h>
Expand Down Expand Up @@ -52,7 +54,7 @@ ProcessorPtr addGroupingSetForTotals(SharedHeader header, const Names & keys, bo
}
}

auto grouping_col = ColumnUInt64::create(1, grouping_set_number);
auto grouping_col = ColumnConst::create(ColumnUInt64::create(1, grouping_set_number), 1);
const auto * grouping_node = &dag.addColumn(
{ColumnPtr(std::move(grouping_col)), std::make_shared<DataTypeUInt64>(), "__grouping_set"});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ static AggregateProjectionInfo getAggregatingProjectionInfo(
/// We can do it because projection is stored for every part separately.
for (const auto & virt_column : key_virtual_columns)
{
const auto * input = &info.before_aggregation->addInput(virt_column);
const auto * input = &info.before_aggregation->addInput(virt_column.name, virt_column.type);
info.before_aggregation->getOutputs().push_back(input);
info.keys.push_back(virt_column.name);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
Expression ((Project names + Projection))
Aggregating
Expression (((Before GROUP BY + ) + Discarding unused columns))
Expression (Post Join Actions)
Join (JOIN FillRightFirst)
Expression (Left Pre Join Actions)
Expression (Post Join Actions)
Join (JOIN FillRightFirst)
Expression (Left Pre Join Actions)
Filter ((WHERE + (Change column names to column identifiers + (Project names + Projection))))
Window (Window step for window \'PARTITION BY __table3.key\')
Sorting (Sorting for window \'PARTITION BY __table3.key\')
Expression ((Before WINDOW + Change column names to column identifiers))
ReadFromMergeTree (default.test)
Expression (Right Pre Join Actions)
Filter ((((WHERE + (Change column names to column identifiers + (Project names + (Projection + )))) + ) + (Change column names to column identifiers + (Project names + Projection))))
Aggregating
Expression ((Before GROUP BY + Change column names to column identifiers))
ReadFromMergeTree (default.test)
Expression (Right Pre Join Actions)
Expression ((Change column names to column identifiers + (Project names + Projection)))
Aggregating
Expression ((Before GROUP BY + Change column names to column identifiers))
ReadFromMergeTree (default.test)
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
-- Regression test: a CTE (view_2) referenced twice via JOINs, combined with a
-- window function and a WHERE on a joined column, must produce a stable query
-- plan (compared against the accompanying .reference EXPLAIN output).
-- NOTE(review): appears tied to https://github.com/ClickHouse/ClickHouse/issues/90363
-- mentioned in the sibling ActionsDAG.cpp change — confirm against the PR description.
CREATE TABLE test
(
`key` UInt64,
`value` Int64
)
ENGINE = MergeTree
ORDER BY key;

-- cityHash64 spreads keys so rows land in no particular ORDER BY key clustering.
INSERT INTO test SELECT cityHash64(number) AS key, number AS value FROM numbers(100);

-- Pin settings so the EXPLAIN text is deterministic across configurations:
-- parallel replicas would alter the plan shape; the analyzer produces the
-- "Project names / Change column names to column identifiers" steps expected
-- by the .reference file.
SET enable_parallel_replicas = 0;
SET enable_analyzer = 1;
EXPLAIN PLAN
WITH
view_1 AS
(
SELECT
key,
ROW_NUMBER() OVER (PARTITION BY key) AS rn
FROM test
),
view_2 AS
(
SELECT
key,
count() > 0 AS has_any
FROM test
GROUP BY
key
),
-- view_2 is joined twice (INNER then LEFT) on purpose: the second reference
-- exercises reuse of the same aggregated subquery in the plan.
events AS
(
SELECT
*
FROM view_1 AS v1
INNER JOIN view_2 AS v2_1 USING (key)
LEFT JOIN view_2 AS v2_2 USING (key)
WHERE v1.rn = 1
)
SELECT count()
FROM events
WHERE v2_1.has_any;
Loading