rm #33025 (Closed)
2 changes: 1 addition & 1 deletion R/pkg/tests/fulltests/test_sparkSQL.R
@@ -1682,7 +1682,7 @@ test_that("column functions", {
df <- as.DataFrame(list(list("col" = "{\"date\":\"21/10/2014\"}")))
schema2 <- structType(structField("date", "date"))
s <- collect(select(df, from_json(df$col, schema2)))
-expect_equal(s[[1]][[1]], NA)
+expect_equal(s[[1]][[1]]$date, NA)
s <- collect(select(df, from_json(df$col, schema2, dateFormat = "dd/MM/yyyy")))
expect_is(s[[1]][[1]]$date, "Date")
expect_equal(as.character(s[[1]][[1]]$date), "2014-10-21")
5 changes: 5 additions & 0 deletions build/spark-2.4.3
@@ -0,0 +1,5 @@
#!/bin/bash

# Generate the build-info properties for Spark 2.4.3.
# Note the relative paths: this assumes the script is run from the build/ directory.
./spark-build-info ../core/target/extra-resources 2.4.3

# Equivalent invocation from the repository root:
# ./build/spark-build-info ./core/target/extra-resources 2.4.3
3 changes: 3 additions & 0 deletions docs/sql-migration-guide-upgrade.md
@@ -7,6 +7,9 @@ displayTitle: Spark SQL Upgrading Guide
* Table of contents
{:toc}

## Upgrading from Spark SQL 2.4.3 to 2.4.3.1 (branch 2.4.3.1-kl)
- Since Spark 2.4.3.1, the `from_json` function supports two modes, `PERMISSIVE` and `FAILFAST`, which can be set via the `mode` option; the default mode is now `PERMISSIVE`. In previous versions, the behavior of `from_json` conformed to neither `PERMISSIVE` nor `FAILFAST`, especially when processing malformed JSON records. For example, the JSON string `{"a" 1}` with the schema `a INT` was converted to `null` by previous versions, but Spark 2.4.3.1 converts it to `Row(null)` (see the sketch below).
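A minimal PySpark sketch of the two modes on the malformed record above; it assumes a running `SparkSession` bound to `spark`, and the column name `value` is ours:

```python
from pyspark.sql.functions import from_json
from pyspark.sql.types import StructType, StructField, IntegerType

# One malformed JSON record: `{"a" 1}` is missing the colon after "a".
df = spark.createDataFrame([('{"a" 1}',)], ["value"])
schema = StructType([StructField("a", IntegerType())])

# PERMISSIVE (the default): the malformed record becomes Row(a=None).
df.select(from_json(df.value, schema).alias("json")).collect()
# [Row(json=Row(a=None))]

# FAILFAST: the same record raises an error at parse time
# (the exact exception type and message vary by version).
try:
    df.select(from_json(df.value, schema, {"mode": "FAILFAST"}).alias("json")).collect()
except Exception as e:
    print("FAILFAST raised:", type(e).__name__)
```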

## Upgrading from Spark SQL 2.4 to 2.4.1

- The value of `spark.executor.heartbeatInterval`, when specified without units like "30" rather than "30s", was inconsistently interpreted as both seconds and milliseconds in Spark 2.4.0 in different parts of the code. Unitless values are now consistently interpreted as milliseconds.
1 change: 0 additions & 1 deletion external/kafka-0-10-sql/pom.xml
@@ -25,7 +25,6 @@
<relativePath>../../pom.xml</relativePath>
</parent>

-<groupId>org.apache.spark</groupId>
<artifactId>spark-sql-kafka-0-10_2.11</artifactId>
<properties>
<sbt.project.name>sql-kafka-0-10</sbt.project.name>
6 changes: 6 additions & 0 deletions pom.xml
@@ -19,14 +19,17 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>18</version>
</parent>

<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.11</artifactId>
<version>2.4.3</version>

<packaging>pom</packaging>
<name>Spark Project Parent POM</name>
<url>http://spark.apache.org/</url>
@@ -109,6 +112,8 @@
</modules>

<properties>
<spark.base.version>2.4.3</spark.base.version>
<spark.fix.version>2.4.3.1-SNAPSHOT</spark.fix.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
@@ -2885,4 +2890,5 @@
<id>sparkr</id>
</profile>
</profiles>

</project>
2 changes: 1 addition & 1 deletion python/pyspark/sql/functions.py
@@ -2261,7 +2261,7 @@ def from_json(col, schema, options={}):
[Row(json=[Row(a=1)])]
>>> schema = schema_of_json(lit('''{"a": 0}'''))
>>> df.select(from_json(df.value, schema).alias("json")).collect()
-[Row(json=Row(a=1))]
+[Row(json=Row(a=None))]
>>> data = [(1, '''[1, 2, 3]''')]
>>> schema = ArrayType(IntegerType())
>>> df = spark.createDataFrame(data, ("key", "value"))
36 changes: 32 additions & 4 deletions sql/catalyst/pom.xml
@@ -27,6 +27,8 @@
</parent>

<artifactId>spark-catalyst_2.11</artifactId>
<version>${spark.fix.version}</version>

<packaging>jar</packaging>
<name>Spark Project Catalyst</name>
<url>http://spark.apache.org/</url>
@@ -47,18 +49,31 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
-<version>${project.version}</version>
+<version>${spark.base.version}</version>
<exclusions>
<exclusion>
<artifactId>spark-tags_2.11</artifactId>
<groupId>org.apache.spark</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
-<version>${project.version}</version>
+<version>${spark.base.version}</version>
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>spark-tags_2.11</artifactId>
<groupId>org.apache.spark</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<version>${spark.base.version}</version>
</dependency>

<!--
@@ -68,19 +83,32 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-tags_${scala.binary.version}</artifactId>
<version>${spark.base.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-unsafe_${scala.binary.version}</artifactId>
-<version>${project.version}</version>
+<version>${spark.base.version}</version>
<exclusions>
<exclusion>
<artifactId>spark-tags_2.11</artifactId>
<groupId>org.apache.spark</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sketch_${scala.binary.version}</artifactId>
-<version>${project.version}</version>
+<version>${spark.base.version}</version>
<exclusions>
<exclusion>
<artifactId>spark-tags_2.11</artifactId>
<groupId>org.apache.spark</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.scalacheck</groupId>