-
Notifications
You must be signed in to change notification settings - Fork 2k
/
ExtendedDataSourceV2Strategy.scala
206 lines (172 loc) · 10 KB
/
ExtendedDataSourceV2Strategy.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.iceberg.spark.Spark3Util
import org.apache.iceberg.spark.SparkCatalog
import org.apache.iceberg.spark.SparkSessionCatalog
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.Strategy
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier
import org.apache.spark.sql.catalyst.analysis.ResolvedNamespace
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.catalyst.expressions.PredicateHelper
import org.apache.spark.sql.catalyst.plans.logical.AddPartitionField
import org.apache.spark.sql.catalyst.plans.logical.Call
import org.apache.spark.sql.catalyst.plans.logical.CreateOrReplaceBranch
import org.apache.spark.sql.catalyst.plans.logical.CreateOrReplaceTag
import org.apache.spark.sql.catalyst.plans.logical.DescribeRelation
import org.apache.spark.sql.catalyst.plans.logical.DropBranch
import org.apache.spark.sql.catalyst.plans.logical.DropIdentifierFields
import org.apache.spark.sql.catalyst.plans.logical.DropPartitionField
import org.apache.spark.sql.catalyst.plans.logical.DropTag
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.plans.logical.MergeRows
import org.apache.spark.sql.catalyst.plans.logical.NoStatsUnaryNode
import org.apache.spark.sql.catalyst.plans.logical.OrderAwareCoalesce
import org.apache.spark.sql.catalyst.plans.logical.RenameTable
import org.apache.spark.sql.catalyst.plans.logical.ReplaceIcebergData
import org.apache.spark.sql.catalyst.plans.logical.ReplacePartitionField
import org.apache.spark.sql.catalyst.plans.logical.SetIdentifierFields
import org.apache.spark.sql.catalyst.plans.logical.SetViewProperties
import org.apache.spark.sql.catalyst.plans.logical.SetWriteDistributionAndOrdering
import org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable
import org.apache.spark.sql.catalyst.plans.logical.ShowTableProperties
import org.apache.spark.sql.catalyst.plans.logical.UnsetViewProperties
import org.apache.spark.sql.catalyst.plans.logical.UpdateRows
import org.apache.spark.sql.catalyst.plans.logical.WriteIcebergDelta
import org.apache.spark.sql.catalyst.plans.logical.views.CreateIcebergView
import org.apache.spark.sql.catalyst.plans.logical.views.DropIcebergView
import org.apache.spark.sql.catalyst.plans.logical.views.ResolvedV2View
import org.apache.spark.sql.catalyst.plans.logical.views.ShowIcebergViews
import org.apache.spark.sql.connector.catalog.Identifier
import org.apache.spark.sql.connector.catalog.TableCatalog
import org.apache.spark.sql.connector.catalog.ViewCatalog
import org.apache.spark.sql.execution.OrderAwareCoalesceExec
import org.apache.spark.sql.execution.SparkPlan
import scala.jdk.CollectionConverters._
/**
 * Planner strategy that lowers Iceberg-specific logical plan nodes (procedure calls,
 * partition-field DDL, branch/tag management, row-level writes, and V2 view commands)
 * into their physical execution counterparts.
 *
 * Plans whose target does not resolve to an Iceberg catalog fall through (return `Nil`)
 * so that other strategies get a chance to plan them.
 */
case class ExtendedDataSourceV2Strategy(spark: SparkSession) extends Strategy with PredicateHelper {

  override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
    // Stored-procedure invocation: argument expressions are evaluated eagerly
    // into a single row that the physical operator passes to the procedure.
    case call @ Call(procedure, procArgs) =>
      CallExec(call.output, procedure, buildInternalRow(procArgs)) :: Nil

    // ---- partition-spec and identifier-field DDL --------------------------------
    case AddPartitionField(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), transform, fieldName) =>
      AddPartitionFieldExec(tblCatalog, tblIdent, transform, fieldName) :: Nil

    case DropPartitionField(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), transform) =>
      DropPartitionFieldExec(tblCatalog, tblIdent, transform) :: Nil

    case ReplacePartitionField(
        IcebergCatalogAndIdentifier(tblCatalog, tblIdent), transformFrom, transformTo, fieldName) =>
      ReplacePartitionFieldExec(tblCatalog, tblIdent, transformFrom, transformTo, fieldName) :: Nil

    case SetIdentifierFields(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), fields) =>
      SetIdentifierFieldsExec(tblCatalog, tblIdent, fields) :: Nil

    case DropIdentifierFields(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), fields) =>
      DropIdentifierFieldsExec(tblCatalog, tblIdent, fields) :: Nil

    // ---- branch / tag management ------------------------------------------------
    case CreateOrReplaceBranch(
        IcebergCatalogAndIdentifier(tblCatalog, tblIdent),
        branch, branchOptions, create, replace, ifNotExists) =>
      CreateOrReplaceBranchExec(
        tblCatalog, tblIdent, branch, branchOptions, create, replace, ifNotExists) :: Nil

    case CreateOrReplaceTag(
        IcebergCatalogAndIdentifier(tblCatalog, tblIdent),
        tag, tagOptions, create, replace, ifNotExists) =>
      CreateOrReplaceTagExec(tblCatalog, tblIdent, tag, tagOptions, create, replace, ifNotExists) :: Nil

    case DropBranch(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), branch, ifExists) =>
      DropBranchExec(tblCatalog, tblIdent, branch, ifExists) :: Nil

    case DropTag(IcebergCatalogAndIdentifier(tblCatalog, tblIdent), tag, ifExists) =>
      DropTagExec(tblCatalog, tblIdent, tag, ifExists) :: Nil

    case SetWriteDistributionAndOrdering(
        IcebergCatalogAndIdentifier(tblCatalog, tblIdent), distributionMode, ordering) =>
      SetWriteDistributionAndOrderingExec(tblCatalog, tblIdent, distributionMode, ordering) :: Nil

    // ---- row-level writes -------------------------------------------------------
    // In both write cases the cache is refreshed against the ORIGINAL relation so
    // cached plans built on it are invalidated; refreshCache(...) is curried and
    // only produces the thunk here — it runs after the write commits.
    case ReplaceIcebergData(_: DataSourceV2Relation, query, original: DataSourceV2Relation, Some(write)) =>
      ReplaceDataExec(planLater(query), refreshCache(original), write) :: Nil

    case WriteIcebergDelta(
        _: DataSourceV2Relation, query, original: DataSourceV2Relation, projections, Some(write)) =>
      WriteDeltaExec(planLater(query), refreshCache(original), projections, write) :: Nil

    case MergeRows(isSourceRowPresent, isTargetRowPresent, matchedConditions, matchedOutputs,
        notMatchedConditions, notMatchedOutputs, targetOutput, performCardinalityCheck,
        emitNotMatchedTargetRows, output, child) =>
      MergeRowsExec(isSourceRowPresent, isTargetRowPresent, matchedConditions, matchedOutputs,
        notMatchedConditions, notMatchedOutputs, targetOutput, performCardinalityCheck,
        emitNotMatchedTargetRows, output, planLater(child)) :: Nil

    case UpdateRows(deleteOutput, insertOutput, output, child) =>
      UpdateRowsExec(deleteOutput, insertOutput, output, planLater(child)) :: Nil

    // Pass-through wrapper: only suppresses stats during optimization, so it has
    // no physical counterpart of its own.
    case NoStatsUnaryNode(child) =>
      planLater(child) :: Nil

    case OrderAwareCoalesce(numPartitions, coalescer, child) =>
      OrderAwareCoalesceExec(numPartitions, coalescer, planLater(child)) :: Nil

    // ---- V2 view commands -------------------------------------------------------
    // Renaming is only supported within a single catalog; moving across catalogs
    // is rejected before any physical operator is created.
    case RenameTable(ResolvedV2View(sourceCatalog: ViewCatalog, sourceIdent), newName, true) =>
      val targetIdent = Spark3Util.catalogAndIdentifier(spark, newName.toList.asJava)
      if (sourceCatalog.name != targetIdent.catalog().name()) {
        throw new AnalysisException(
          s"Cannot move view between catalogs: from=${sourceCatalog.name} and to=${targetIdent.catalog().name()}")
      }
      RenameV2ViewExec(sourceCatalog, sourceIdent, targetIdent.identifier()) :: Nil

    case DropIcebergView(ResolvedIdentifier(viewCatalog: ViewCatalog, viewIdent), ifExists) =>
      DropV2ViewExec(viewCatalog, viewIdent, ifExists) :: Nil

    case CreateIcebergView(ResolvedIdentifier(viewCatalog: ViewCatalog, viewIdent), queryText, query,
        columnAliases, columnComments, queryColumnNames, comment, properties, allowExisting, replace, _) =>
      CreateV2ViewExec(
        catalog = viewCatalog,
        ident = viewIdent,
        queryText = queryText,
        columnAliases = columnAliases,
        columnComments = columnComments,
        queryColumnNames = queryColumnNames,
        viewSchema = query.schema,
        comment = comment,
        properties = properties,
        allowExisting = allowExisting,
        replace = replace) :: Nil

    case DescribeRelation(ResolvedV2View(viewCatalog, viewIdent), _, isExtended, output) =>
      DescribeV2ViewExec(output, viewCatalog.loadView(viewIdent), isExtended) :: Nil

    case ShowTableProperties(ResolvedV2View(viewCatalog, viewIdent), propertyKey, output) =>
      ShowV2ViewPropertiesExec(output, viewCatalog.loadView(viewIdent), propertyKey) :: Nil

    case ShowIcebergViews(ResolvedNamespace(viewCatalog: ViewCatalog, namespace), pattern, output) =>
      ShowV2ViewsExec(output, viewCatalog, namespace, pattern) :: Nil

    case ShowCreateTable(ResolvedV2View(viewCatalog, viewIdent), _, output) =>
      ShowCreateV2ViewExec(output, viewCatalog.loadView(viewIdent)) :: Nil

    case SetViewProperties(ResolvedV2View(viewCatalog, viewIdent), properties) =>
      AlterV2ViewSetPropertiesExec(viewCatalog, viewIdent, properties) :: Nil

    case UnsetViewProperties(ResolvedV2View(viewCatalog, viewIdent), propertyKeys, ifExists) =>
      AlterV2ViewUnsetPropertiesExec(viewCatalog, viewIdent, propertyKeys, ifExists) :: Nil

    // Anything else is not an Iceberg extension node — let other strategies plan it.
    case _ => Nil
  }

  /** Eagerly evaluates the given (foldable, already-resolved) expressions into one internal row. */
  private def buildInternalRow(exprs: Seq[Expression]): InternalRow =
    new GenericInternalRow(exprs.map(_.eval()).toArray)

  /**
   * Returns a thunk that re-caches every cached plan built on `relation`.
   * Curried so the refresh can be handed to a physical operator and deferred
   * until after the write commits.
   */
  private def refreshCache(relation: DataSourceV2Relation)(): Unit =
    spark.sharedState.cacheManager.recacheByPlan(spark, relation)

  /**
   * Extractor that resolves a multi-part identifier against the session's catalogs
   * and matches only when the resolved catalog is Iceberg-managed
   * ([[SparkCatalog]] or [[SparkSessionCatalog]]).
   */
  private object IcebergCatalogAndIdentifier {
    def unapply(nameParts: Seq[String]): Option[(TableCatalog, Identifier)] = {
      val resolved = Spark3Util.catalogAndIdentifier(spark, nameParts.asJava)
      resolved.catalog match {
        case iceberg: SparkCatalog =>
          Some((iceberg, resolved.identifier))
        case iceberg: SparkSessionCatalog[_] =>
          Some((iceberg, resolved.identifier))
        case _ =>
          None
      }
    }
  }
}