diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index f840876fc5d00..c539452bb9ae0 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -147,6 +147,13 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
       |('2013-04-05 12:01:02'),
       |('2013-04-05 18:01:02.123'),
       |('2013-04-05 18:01:02.123456')""".stripMargin).executeUpdate()
+
+    conn.prepareStatement("CREATE DOMAIN not_null_text AS TEXT DEFAULT ''").executeUpdate()
+    conn.prepareStatement("create table custom_type(type_array not_null_text[]," +
+      "type not_null_text)").executeUpdate()
+    conn.prepareStatement("INSERT INTO custom_type (type_array, type) VALUES" +
+      "('{1,fds,fdsa}','fdasfasdf')").executeUpdate()
+
   }
 
   test("Type mapping for various types") {
@@ -416,4 +423,13 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     val df_actual = sqlContext.read.jdbc(jdbcUrl, "timestamp_ntz_roundtrip", prop)
     assert(df_actual.collect()(0) == df_expected.collect()(0))
   }
+
+  test("SPARK-43267: user-defined column in array test") {
+    val df = sqlContext.read.jdbc(jdbcUrl, "custom_type", new Properties)
+    val row = df.collect()
+    assert(row.length === 1)
+    assert(row(0).length === 2)
+    assert(row(0).getSeq[String](0) == Seq("1", "fds", "fdsa"))
+    assert(row(0).getString(1) == "fdasfasdf")
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
index b42d575ae2d47..ab8b1a7e1a50d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/PostgresDialect.scala
@@ -94,13 +94,15 @@ private object PostgresDialect extends JdbcDialect with SQLConfHelper {
     case "numeric" | "decimal" if precision > 0 => Some(DecimalType.bounded(precision, scale))
     case "numeric" | "decimal" =>
       // SPARK-26538: handle numeric without explicit precision and scale.
-      Some(DecimalType. SYSTEM_DEFAULT)
+      Some(DecimalType.SYSTEM_DEFAULT)
     case "money" =>
       // money[] type seems to be broken and difficult to handle.
       // So this method returns None for now.
       // See SPARK-34333 and https://github.com/pgjdbc/pgjdbc/issues/1405
       None
-    case _ => None
+    case _ =>
+      // SPARK-43267: handle unknown types in array as string, because there are user-defined types
+      Some(StringType)
   }
 
   override def convertJavaTimestampToTimestampNTZ(t: Timestamp): LocalDateTime = {
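
A minimal sketch (not part of the patch) of the behaviour the new `case _ => Some(StringType)` fallback enables: a Postgres array whose element type is a user-defined domain such as `not_null_text` now resolves to an array of strings instead of failing schema inference. It assumes a reachable Postgres instance that already contains the `custom_type` table from `dataPreparation` above; the `POSTGRES_JDBC_URL` environment variable is a placeholder for illustration, not something the patch introduces.

import java.util.Properties

import org.apache.spark.sql.SparkSession

object CustomTypeReadSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    // Hypothetical: the JDBC URL of a Postgres instance holding custom_type,
    // supplied via an environment variable for this sketch.
    val jdbcUrl = sys.env("POSTGRES_JDBC_URL")
    val df = spark.read.jdbc(jdbcUrl, "custom_type", new Properties)
    // With the dialect change, the unknown element type of not_null_text[]
    // falls back to StringType, so the printed schema should look roughly like:
    //   |-- type_array: array (element: string)
    //   |-- type: string
    df.printSchema()
    df.show(truncate = false)
    spark.stop()
  }
}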