Skip to content

Commit

Permalink
Changing Spark translation of DATEADD() from DATE_ADD() to `DATEADD()` as required by some but not all Databricks instances. Fixes #366
Browse files Browse the repository at this point in the history
  • Loading branch information
schuemie committed Jun 4, 2024
1 parent cfdec3a commit acdd6f3
Show file tree
Hide file tree
Showing 6 changed files with 53 additions and 51 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
Package: SqlRender
Type: Package
Title: Rendering Parameterized SQL and Translation to Dialects
Version: 1.18.0
Date: 2024-05-30
Version: 1.18.1
Date: 2024-06-04
Authors@R: c(
person("Martijn", "Schuemie", , "schuemie@ohdsi.org", role = c("aut", "cre")),
person("Marc", "Suchard", role = c("aut"))
Expand Down
8 changes: 8 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
SqlRender 1.18.1
================

Changes:

1. Changing Spark translation of `DATEADD()` from `DATE_ADD()` to `DATEADD()` as required by some but not all Databricks instances.


SqlRender 1.18.0
================

Expand Down
20 changes: 7 additions & 13 deletions inst/csv/replacementPatterns.csv
Original file line number Diff line number Diff line change
Expand Up @@ -1090,19 +1090,13 @@ spark,"HASHBYTES('MD5',@a)","MD5(@a)"
spark,"CONVERT(VARBINARY, CONCAT('0x', @a), 1)","CAST(CONCAT('x', @a) AS BIT(32))"
spark,"CONVERT(DATE, @a)","TO_DATE(@a, 'yyyy-MM-dd')"
spark,"DATEPART(@part, @date)","DATE_PART('@part', @date)"
spark,"DATEADD(second,@seconds,@datetime)","DATE_ADD(second,@seconds,@datetime)"
spark,"DATEADD(minute,@minutes,@datetime)","DATE_ADD(minute,@minutes,@datetime)"
spark,"DATEADD(hour,@hours,@datetime)","DATE_ADD(hour,@hours,@datetime)"
spark,"DATEADD(d,@days,@date)","DATE_ADD(day,@days,@date)"
spark,"DATEADD(dd,@days,@date)","DATE_ADD(day,@days,@date)"
spark,"DATEADD(day,@days,@date)","DATE_ADD(day,@days,@date)"
spark,"DATEADD(m,@months,@date)","DATE_ADD(month,@months,@date)"
spark,"DATEADD(mm,@months,@date)","DATE_ADD(month,@months,@date)"
spark,"DATEADD(month,@months,@date)","DATE_ADD(month,@months,@date)"
spark,"DATEADD(yy,@years,@date)","DATE_ADD(year,@years,@date)"
spark,"DATEADD(yyyy,@years,@date)","DATE_ADD(year,@years,@date)"
spark,"DATEADD(year,@years,@date)","DATE_ADD(year,@years,@date)"
spark,"DATE_ADD(@part,@(-?[0-9]+)a.0,@date)","DATE_ADD(@part,@a,@date)"
spark,"DATEADD(d,@days,@date)","DATEADD(day,@days,@date)"
spark,"DATEADD(dd,@days,@date)","DATEADD(day,@days,@date)"
spark,"DATEADD(m,@months,@date)","DATEADD(month,@months,@date)"
spark,"DATEADD(mm,@months,@date)","DATEADD(month,@months,@date)"
spark,"DATEADD(yy,@years,@date)","DATEADD(year,@years,@date)"
spark,"DATEADD(yyyy,@years,@date)","DATEADD(year,@years,@date)"
spark,"DATEADD(@part,@(-?[0-9]+)a.0,@date)","DATEADD(@part,@a,@date)"
spark,INTERVAL @(-?[0-9]+)a.0,INTERVAL @a
spark,"DATEDIFF(d,@start, @end)","datediff(day,@start,@end)"
spark,"DATEDIFF(dd,@start, @end)","datediff(day,@start,@end)"
Expand Down
4 changes: 2 additions & 2 deletions man/loadRenderTranslateSql.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
<groupId>org.ohdsi.sql</groupId>
<artifactId>SqlRender</artifactId>
<packaging>jar</packaging>
<version>1.18.0-SNAPSHOT</version>
<version>1.18.1-SNAPSHOT</version>
<name>SqlRender</name>
<scm>
<connection>scm:git:https://github.com/OHDSI/SqlRender</connection>
Expand Down
66 changes: 33 additions & 33 deletions tests/testthat/test-translate-spark.R
Original file line number Diff line number Diff line change
Expand Up @@ -54,83 +54,83 @@ test_that("translate sql server -> spark convert date", {
})


test_that("translate sql server -> spark dateadd", {
test_that("translate sql server -> spark DATEADD", {
# Need custom translation pattern for negative intervals in Spark
sql <- translate("SELECT dateadd(second, -1 * 2, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(second, -1 * 2, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(second,-1 * 2,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(second,-1 * 2,'2019-01-01 00:00:00')")

sql <- translate("SELECT dateadd(minute, -1 * 3, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(minute, -1 * 3, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(minute,-1 * 3,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(minute,-1 * 3,'2019-01-01 00:00:00')")

sql <- translate("SELECT dateadd(hour, -1 * 4, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(hour, -1 * 4, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(hour,-1 * 4,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(hour,-1 * 4,'2019-01-01 00:00:00')")

# Positive intervals have typical translation patterns
sql <- translate("SELECT dateadd(second, 1, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(second, 1, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(second,1,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(second,1,'2019-01-01 00:00:00')")

sql <- translate("SELECT dateadd(minute, 1, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(minute, 1, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(minute,1,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(minute,1,'2019-01-01 00:00:00')")

sql <- translate("SELECT dateadd(hour, 1, '2019-01-01 00:00:00')",
sql <- translate("SELECT DATEADD(hour, 1, '2019-01-01 00:00:00')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(hour,1,'2019-01-01 00:00:00')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(hour,1,'2019-01-01 00:00:00')")

sql <- translate("SELECT dateadd(d, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(d, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(day,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(day,1,'2019-01-01')")

sql <- translate("SELECT dateadd(dd, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(dd, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(day,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(day,1,'2019-01-01')")

sql <- translate("SELECT dateadd(day, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(day, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(day,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(day,1,'2019-01-01')")

sql <- translate("SELECT dateadd(m, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(m, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(month,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(month,1,'2019-01-01')")

sql <- translate("SELECT dateadd(mm, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(mm, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(month,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(month,1,'2019-01-01')")

sql <- translate("SELECT dateadd(month, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(month, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(month,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(month,1,'2019-01-01')")

sql <- translate("SELECT dateadd(yy, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(yy, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(year,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(year,1,'2019-01-01')")

sql <- translate("SELECT dateadd(yyyy, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(yyyy, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(year,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(year,1,'2019-01-01')")

sql <- translate("SELECT dateadd(year, 1, '2019-01-01')",
sql <- translate("SELECT DATEADD(year, 1, '2019-01-01')",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "SELECT DATE_ADD(year,1,'2019-01-01')")
expect_equal_ignore_spaces(sql, "SELECT DATEADD(year,1,'2019-01-01')")
})


Expand Down Expand Up @@ -422,14 +422,14 @@ test_that("translate sql server -> spark DATEADD DAY with float", {
sql <- translate("select DATEADD(DAY, 1.0, some_date) from my_table;",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "select DATE_ADD(day, 1, some_date) from my_table;")
expect_equal_ignore_spaces(sql, "select DATEADD(DAY, 1, some_date) from my_table;")
})

test_that("translate sql server -> spark DATEADD YEAR with float", {
sql <- translate("select DATEADD(YEAR, 1.0, some_date) from my_table;",
targetDialect = "spark"
)
expect_equal_ignore_spaces(sql, "select DATE_ADD(year, 1, some_date) from my_table;")
expect_equal_ignore_spaces(sql, "select DATEADD(YEAR, 1, some_date) from my_table;")
})

test_that("translate sql server -> spark DATEADD YEAR with float", {
Expand Down

0 comments on commit acdd6f3

Please sign in to comment.