fix explain in aqe
ulysses-you committed Sep 16, 2020
1 parent 197f41f commit 95a3e02
Showing 2 changed files with 43 additions and 0 deletions.
@@ -1,3 +1,4 @@
--IMPORT explain.sql

--SET spark.sql.adaptive.enabled=true
--SET spark.sql.maxMetadataStringLength = 500
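
The --IMPORT and --SET lines above are SQLQueryTestSuite harness directives: the AQE variant re-runs the queries defined in explain.sql with adaptive execution switched on. To try the query below outside the harness, a minimal sketch in a spark-sql shell (assuming Spark 3.x; the real explain_temp1 DDL lives in explain.sql and is paraphrased here from the key/val parquet schema visible in the plans):

-- Minimal reproduction sketch; the table DDL is paraphrased, not the harness's exact setup.
SET spark.sql.adaptive.enabled=true;
CREATE TABLE explain_temp1 (key int, val int) USING PARQUET;
EXPLAIN EXTENDED SELECT sum(distinct val) FROM explain_temp1;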
sql/core/src/test/resources/sql-tests/results/explain-aqe.sql.out (42 additions, 0 deletions)
@@ -42,6 +42,48 @@ struct<key:string,value:string>
spark.sql.codegen.wholeStage true


-- !query
EXPLAIN EXTENDED
SELECT sum(distinct val)
FROM explain_temp1
-- !query schema
struct<plan:string>
-- !query output
== Parsed Logical Plan ==
'Project [unresolvedalias('sum(distinct 'val), None)]
+- 'UnresolvedRelation [explain_temp1], []

== Analyzed Logical Plan ==
sum(DISTINCT val): bigint
Aggregate [sum(distinct cast(val#x as bigint)) AS sum(DISTINCT val)#xL]
+- SubqueryAlias spark_catalog.default.explain_temp1
+- Relation[key#x,val#x] parquet

== Optimized Logical Plan ==
Aggregate [sum(distinct cast(val#x as bigint)) AS sum(DISTINCT val)#xL]
+- Project [val#x]
+- Relation[key#x,val#x] parquet

== Physical Plan ==
AdaptiveSparkPlan isFinalPlan=false
+- == Current Plan ==
HashAggregate(keys=[], functions=[sum(distinct cast(val#x as bigint)#xL)], output=[sum(DISTINCT val)#xL])
+- Exchange SinglePartition, true, [id=#x]
+- HashAggregate(keys=[], functions=[partial_sum(distinct cast(val#x as bigint)#xL)], output=[sum#xL])
+- HashAggregate(keys=[cast(val#x as bigint)#xL], functions=[], output=[cast(val#x as bigint)#xL])
+- Exchange hashpartitioning(cast(val#x as bigint)#xL, 4), true, [id=#x]
+- HashAggregate(keys=[cast(val#x as bigint) AS cast(val#x as bigint)#xL], functions=[], output=[cast(val#x as bigint)#xL])
+- FileScan parquet default.explain_temp1[val#x] Batched: true, DataFilters: [], Format: Parquet, Location [not included in comparison]/{warehouse_dir}/explain_temp1], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<val:int>
+- == Initial Plan ==
HashAggregate(keys=[], functions=[sum(distinct cast(val#x as bigint)#xL)], output=[sum(DISTINCT val)#xL])
+- Exchange SinglePartition, true, [id=#x]
+- HashAggregate(keys=[], functions=[partial_sum(distinct cast(val#x as bigint)#xL)], output=[sum#xL])
+- HashAggregate(keys=[cast(val#x as bigint)#xL], functions=[], output=[cast(val#x as bigint)#xL])
+- Exchange hashpartitioning(cast(val#x as bigint)#xL, 4), true, [id=#x]
+- HashAggregate(keys=[cast(val#x as bigint) AS cast(val#x as bigint)#xL], functions=[], output=[cast(val#x as bigint)#xL])
+- FileScan parquet default.explain_temp1[val#x] Batched: true, DataFilters: [], Format: Parquet, Location [not included in comparison]/{warehouse_dir}/explain_temp1], PartitionFilters: [], PushedFilters: [], ReadSchema: struct<val:int>
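
Note that before any stage has run, AdaptiveSparkPlan reports isFinalPlan=false and the Current Plan is identical to the Initial Plan; AQE only re-optimizes once runtime statistics from finished stages are available. The distinct aggregate is planned as the usual two-phase expansion: de-duplication keyed on cast(val#x as bigint) around a hash-partitioned shuffle, then partial and final sums around a single-partition shuffle. For contrast, with AQE disabled the same query prints the physical plan directly, without the AdaptiveSparkPlan wrapper. A sketch, assuming the session from the setup above:

-- Contrast sketch (assumes the session created in the setup sketch above).
SET spark.sql.adaptive.enabled=false;
EXPLAIN EXTENDED SELECT sum(distinct val) FROM explain_temp1;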


-- !query
EXPLAIN FORMATTED
SELECT key, max(val)
