-
-
Notifications
You must be signed in to change notification settings - Fork 33.1k
Fixed #36134 -- Added support for the ABSENT ON NULL clause for JSONArray. #19097
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,33 +1,118 @@ | ||
from django.db import NotSupportedError | ||
from django.db.models.expressions import Func, Value | ||
from django.db.models.expressions import Case, Func, Value, When | ||
from django.db.models.fields import TextField | ||
from django.db.models.fields.json import JSONField | ||
from django.db.models.functions import Cast | ||
from django.db.models.lookups import IsNull | ||
|
||
|
||
class _JSONArrayConcat(Func):
    """Concatenate several JSON arrays into a single JSON array.

    A SQL NULL in any operand makes the entire result NULL, and duplicate
    elements are preserved. Objects are deliberately not accepted because
    merge semantics are backend dependent (e.g. MySQL merges recursively,
    while PostgreSQL does not).
    """

    function = None
    output_field = JSONField()

    def __init__(self, *expressions, **extra):
        # Concatenation is only meaningful with at least two operands.
        if len(expressions) < 2:
            raise ValueError("_JSONArrayConcat must take at least two expressions")
        super().__init__(*expressions, **extra)

    def as_sql(self, compiler, connection, **extra_context):
        # Default path: reject backends that do not advertise support. The
        # backend-specific as_*() methods below bypass this check.
        if not connection.features.supports_json_array_concat:
            raise NotSupportedError(
                "Concatenating JSON arrays is not supported on this database backend."
            )
        return super().as_sql(compiler, connection, **extra_context)

    def pipes_concat_sql(self, compiler, connection, **extra_context):
        # Render the SQL-standard "a || b" concatenation operator.
        return super().as_sql(
            compiler,
            connection,
            template="(%(expressions)s)",
            arg_joiner=" || ",
            **extra_context,
        )

    def as_mysql(self, compiler, connection, **extra_context):
        # MySQL spells array concatenation as JSON_MERGE_PRESERVE(...).
        return super().as_sql(
            compiler,
            connection,
            function="JSON_MERGE_PRESERVE",
            **extra_context,
        )

    def as_oracle(self, compiler, connection, **extra_context):
        # Oracle supports the standard || operator for JSON values.
        return self.pipes_concat_sql(compiler, connection, **extra_context)

    def as_postgresql(self, compiler, connection, **extra_context):
        # PostgreSQL's jsonb || jsonb performs a shallow array concat.
        return self.pipes_concat_sql(compiler, connection, **extra_context)
|
||
|
||
class JSONArray(Func): | ||
function = "JSON_ARRAY" | ||
output_field = JSONField() | ||
|
||
def __init__(self, *expressions, absent_on_null=False): | ||
self.absent_on_null = absent_on_null | ||
super().__init__(*expressions) | ||
|
||
def _absent_on_null_workaround(self, compiler): | ||
# On backends that do not support ABSENT ON NULL, we can implement the behavior | ||
# so long as the backend has a way to concatenate JSON arrays. | ||
unit_arrays = [ | ||
Case( | ||
When(IsNull(expression, True), then=JSONArray()), | ||
default=JSONArray(expression), | ||
) | ||
for expression in self.get_source_expressions() | ||
] | ||
|
||
if len(unit_arrays) == 0: | ||
expression = JSONArray() | ||
elif len(unit_arrays) == 1: | ||
expression = unit_arrays[0] | ||
else: | ||
expression = _JSONArrayConcat(*unit_arrays) | ||
|
||
return compiler.compile(expression) | ||
|
||
def as_sql(self, compiler, connection, **extra_context): | ||
if not connection.features.supports_json_field: | ||
raise NotSupportedError( | ||
"JSONFields are not supported on this database backend." | ||
) | ||
if self.absent_on_null and not connection.features.supports_json_absent_on_null: | ||
raise NotSupportedError( | ||
"ABSENT ON NULL is not supported by this database backend." | ||
) | ||
john-parton marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||
return super().as_sql(compiler, connection, **extra_context) | ||
|
||
def as_mysql(self, compiler, connection, **extra_context): | ||
if self.absent_on_null: | ||
return self._absent_on_null_workaround(compiler) | ||
|
||
return super().as_sql(compiler, connection, **extra_context) | ||
|
||
def as_native(self, compiler, connection, *, returning, **extra_context): | ||
# PostgreSQL 16+ and Oracle remove SQL NULL values from the array by | ||
# default. Adds the NULL ON NULL clause to keep NULL values in the | ||
# array, mapping them to JSON null values, which matches the behavior | ||
# of SQLite. | ||
null_on_null = "NULL ON NULL" if len(self.get_source_expressions()) > 0 else "" | ||
# Providing the ON NULL clause when no source expressions are provided is a | ||
# syntax error on some backends. | ||
if len(self.get_source_expressions()) == 0: | ||
on_null_clause = "" | ||
elif self.absent_on_null: | ||
on_null_clause = "ABSENT ON NULL" | ||
else: | ||
on_null_clause = "NULL ON NULL" | ||
|
||
return self.as_sql( | ||
compiler, | ||
connection, | ||
template=( | ||
f"%(function)s(%(expressions)s {null_on_null} RETURNING {returning})" | ||
f"%(function)s(%(expressions)s {on_null_clause} RETURNING {returning})" | ||
|
||
), | ||
**extra_context, | ||
) | ||
|
@@ -54,6 +139,9 @@ def as_postgresql(self, compiler, connection, **extra_context): | |
compiler, connection, returning="JSONB", **extra_context | ||
) | ||
|
||
if self.absent_on_null: | ||
return casted_obj._absent_on_null_workaround(compiler) | ||
|
||
return casted_obj.as_sql( | ||
compiler, | ||
connection, | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
from django.db import NotSupportedError | ||
from django.db.models import F, Value | ||
from django.db.models.functions import JSONArray | ||
from django.db.models.functions.json import _JSONArrayConcat as JSONConcat | ||
from django.test import TestCase | ||
from django.test.testcases import skipIfDBFeature, skipUnlessDBFeature | ||
|
||
from ..models import Author | ||
|
||
|
||
@skipUnlessDBFeature("supports_json_array_concat")
class JSONArrayConcatTests(TestCase):
    """Behavior of the private _JSONArrayConcat expression."""

    @classmethod
    def setUpTestData(cls):
        Author.objects.bulk_create(
            [Author(name="Ivan Ivanov", alias="iivanov")]
        )

    def test_invalid(self):
        # Fewer than two operands is rejected eagerly, before compilation.
        msg = "_JSONArrayConcat must take at least two expressions"
        with self.assertRaisesMessage(ValueError, msg):
            Author.objects.annotate(json=JSONConcat()).first()
        with self.assertRaisesMessage(ValueError, msg):
            Author.objects.annotate(json=JSONConcat(JSONArray(F("name")))).first()

    def test_simple_array(self):
        author = Author.objects.annotate(
            arr=JSONConcat(JSONArray("name"), JSONArray("alias"))
        ).first()
        self.assertEqual(author.arr, ["Ivan Ivanov", "iivanov"])

    def test_array_and_null(self):
        # A SQL NULL operand nullifies the entire result.
        author = Author.objects.annotate(
            json=JSONConcat(JSONArray("name"), Value(None))
        ).first()
        self.assertEqual(author.json, None)

    def test_duplicates_preserved(self):
        author = Author.objects.annotate(
            arr=JSONConcat(JSONArray("name"), JSONArray("name"))
        ).first()
        self.assertEqual(author.arr, ["Ivan Ivanov", "Ivan Ivanov"])
|
||
|
||
@skipIfDBFeature("supports_json_array_concat")
class JSONArrayConcatNotSupportedTests(TestCase):
    """_JSONArrayConcat raises on backends without JSON array concatenation.

    The skip condition must mirror the supports_json_array_concat feature
    flag checked in _JSONArrayConcat.as_sql(); the previous decorator keyed
    off the unrelated has_json_object_function flag, so the test could run
    (and fail) on backends that do support concatenation, and be skipped on
    backends where it should run.
    """

    def test_not_supported(self):
        msg = "Concatenating JSON arrays is not supported on this database backend."
        with self.assertRaisesMessage(NotSupportedError, msg):
            Author.objects.annotate(
                arr=JSONConcat(
                    JSONArray("name"),
                    JSONArray("alias"),
                )
            ).first()
Uh oh!
There was an error while loading. Please reload this page.