Skip to content

Commit

Permalink
Revert "feat: support None operand in EQUAL operator (apache#21713)"
Browse files Browse the repository at this point in the history
This reverts commit 05648eb.
  • Loading branch information
john-bodley committed Nov 8, 2022
1 parent cd1b379 commit 32b1de1
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 90 deletions.
3 changes: 1 addition & 2 deletions superset/charts/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -819,8 +819,7 @@ class ChartDataFilterSchema(Schema):
)
val = fields.Raw(
description="The value or values to compare against. Can be a string, "
"integer, decimal, None or list, depending on the operator.",
allow_none=True,
"integer, decimal or list, depending on the operator.",
example=["China", "France", "Japan"],
)
grain = fields.String(
Expand Down
9 changes: 1 addition & 8 deletions superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1629,14 +1629,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma
elif op == utils.FilterOperator.IS_FALSE.value:
where_clause_and.append(sqla_col.is_(False))
else:
if (
op
not in {
utils.FilterOperator.EQUALS.value,
utils.FilterOperator.NOT_EQUALS.value,
}
and eq is None
):
if eq is None:
raise QueryObjectValidationError(
_(
"Must specify a value for filters "
Expand Down
120 changes: 40 additions & 80 deletions tests/integration_tests/sqla_models_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@
from tests.integration_tests.test_app import app

from .base_tests import SupersetTestCase
from .conftest import only_postgresql

VIRTUAL_TABLE_INT_TYPES: Dict[str, Pattern[str]] = {
"hive": re.compile(r"^INT_TYPE$"),
Expand Down Expand Up @@ -666,90 +665,51 @@ def test_filter_on_text_column(text_column_table):
assert result_object.df["count"][0] == 1


@only_postgresql
def test_should_generate_closed_and_open_time_filter_range(login_as_admin):
    """The generated time filter must be closed on the left, open on the right.

    Of the five timestamps in the virtual table, a [2022-01-01, 2023-01-01)
    range should match exactly two rows: 2022-01-01 and 2022-03-10.
    """
    virtual_sql = (
        "SELECT '2021-12-31'::timestamp as datetime_col "
        "UNION SELECT '2022-01-01'::timestamp "
        "UNION SELECT '2022-03-10'::timestamp "
        "UNION SELECT '2023-01-01'::timestamp "
        "UNION SELECT '2023-03-10'::timestamp "
    )
    table = SqlaTable(
        table_name="temporal_column_table",
        sql=virtual_sql,
        database=get_example_database(),
    )
    # Register the temporal column so it can serve as the query granularity.
    TableColumn(
        column_name="datetime_col",
        type="TIMESTAMP",
        table=table,
        is_dttm=True,
    )
    SqlMetric(metric_name="count", expression="count(*)", table=table)
    query_payload = {
        "metrics": ["count"],
        "is_timeseries": False,
        "filter": [],
        "from_dttm": datetime(2022, 1, 1),
        "to_dttm": datetime(2023, 1, 1),
        "granularity": "datetime_col",
    }
    result_object = table.query(query_payload)
    # Expected generated SQL (inclusive lower bound, exclusive upper bound):
    #   WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
    #     AND datetime_col <  TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
    assert result_object.df.iloc[0]["count"] == 2


def test_should_generate_closed_and_open_time_filter_range():
    """The generated time filter must be closed on the left, open on the right.

    Of the five timestamps in the virtual table, a [2022-01-01, 2023-01-01)
    range should match exactly two rows: 2022-01-01 and 2022-03-10.
    The test is postgresql-only because the expected SQL uses the
    ``::timestamp`` cast and ``TO_TIMESTAMP`` syntax of that dialect.
    """
    with app.app_context():
        if backend() != "postgresql":
            pytest.skip(f"{backend()} has different dialect for datetime column")

        table = SqlaTable(
            table_name="temporal_column_table",
            sql=(
                "SELECT '2021-12-31'::timestamp as datetime_col "
                "UNION SELECT '2022-01-01'::timestamp "
                "UNION SELECT '2022-03-10'::timestamp "
                "UNION SELECT '2023-01-01'::timestamp "
                "UNION SELECT '2023-03-10'::timestamp "
            ),
            database=get_example_database(),
        )
        # Register the temporal column so it can serve as the query granularity.
        TableColumn(
            column_name="datetime_col",
            type="TIMESTAMP",
            table=table,
            is_dttm=True,
        )
        SqlMetric(metric_name="count", expression="count(*)", table=table)
        result_object = table.query(
            {
                "metrics": ["count"],
                "is_timeseries": False,
                "filter": [],
                "from_dttm": datetime(2022, 1, 1),
                "to_dttm": datetime(2023, 1, 1),
                "granularity": "datetime_col",
            }
        )
        # Expected generated SQL (inclusive lower bound, exclusive upper bound):
        #   SELECT count(*) AS count
        #   FROM (...) AS virtual_table
        #   WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
        #     AND datetime_col <  TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
        assert result_object.df.iloc[0]["count"] == 2


@pytest.mark.parametrize(
Expand Down

0 comments on commit 32b1de1

Please sign in to comment.