From 8ba5621d718a447ab34a7d3676ff3f114bd32547 Mon Sep 17 00:00:00 2001
From: Navvya Andrews
Date: Fri, 8 May 2026 12:47:40 +0900
Subject: [PATCH] Document why UNBOUND_SQL_PARAMETER must be raised in CheckAnalysis

- Do NOT remove or disable this check. Named/positional parameters supplied via
  spark.sql(..., args=...) are substituted by the analyzer's parameter-binding
  rule BEFORE CheckAnalysis runs; any Parameter expression that survives to
  CheckAnalysis is genuinely unbound.
- If spark.sql('select :x', args={'x': 1}) incorrectly fails with
  UNBOUND_SQL_PARAMETER (issue #55392), the regression is in the parameter
  binding path, not in this check. Disabling the check would let queries with
  truly unbound parameters pass analysis and fail later with confusing errors
  or wrong results, so the check itself must stay intact.
- This commit only adds an explanatory comment; runtime behavior is unchanged.
---
 .../apache/spark/sql/catalyst/analysis/CheckAnalysis.scala | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index fa4c13bc24af3..266c9938b2557 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -575,6 +575,9 @@ trait CheckAnalysis extends LookupCatalog with QueryErrorsBase with PlanToString
             "exprWithSeed" -> toSQLExpr(e)))

         case p: Parameter =>
+          // Parameter binding (spark.sql args) happens in the analyzer before
+          // CheckAnalysis; a Parameter surviving to this point is genuinely
+          // unbound, so failing here is correct and must not be skipped.
           p.failAnalysis(
             errorClass = "UNBOUND_SQL_PARAMETER",
             messageParameters = Map("name" -> p.name))