Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

query_runner -> query_results: improve logging, handle unhandled data types #6905

Open
wants to merge 24 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
6 changes: 5 additions & 1 deletion redash/query_runner/query_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,9 +109,12 @@ def flatten(value):
return json_dumps(value)
elif isinstance(value, decimal.Decimal):
return float(value)
elif isinstance(value, datetime.timedelta):
elif isinstance(value, (datetime.date, datetime.time, datetime.datetime, datetime.timedelta)):
return str(value)
else:
if logger.isEnabledFor(logging.DEBUG):
if not isinstance(value, (type(None), str, float, int, bool)):
logger.debug("flatten() found unhandled type: %s", str(type(value)))
return value


Expand All @@ -134,6 +137,7 @@ def create_table(connection, table_name, query_results):
column_list=column_list,
place_holders=",".join(["?"] * len(columns)),
)
logger.debug("INSERT template: %s", insert_template)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Pretty sure we don't want debugging statements being unconditionally run. At least with the change above it, it seems to be done conditionally there.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@justinclift thank you for the reply! So there are 2 things:

  • logger only logs (prints to stdout/stderr) messages, nothing is actually run/executed. The default log level is INFO, so logger.debug is actually a condition which logs the data only at a more verbose level
  • for the other added logger there is a condition that checks the current log level and prevents going through too many checks unless it's DEBUG (which is not the default)

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@vtatarin Sorry, but I'm short on time for the next few weeks.

Am getting some (Redash related) stuff deployed to a data centre, and that's taking the majority of my focus time. When that's done I'll be able to look at PRs properly. 😄


for row in query_results["rows"]:
values = [flatten(row.get(column)) for column in columns]
Expand Down
58 changes: 58 additions & 0 deletions tests/query_runner/test_query_results.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import datetime
import decimal
import logging
import sqlite3
from unittest import TestCase

Expand All @@ -15,6 +16,7 @@
extract_query_ids,
extract_query_params,
fix_column_name,
flatten,
get_query_results,
prepare_parameterized_query,
replace_query_parameters,
Expand Down Expand Up @@ -248,3 +250,59 @@ def test_non_cached_query_result(self):
query_result_data = {"columns": [], "rows": []}
qr.return_value = (query_result_data, None)
self.assertEqual(query_result_data, get_query_results(self.factory.user, query.id, False))


class TestFlatten(TestCase):
    """Exercise flatten(): primitives pass through unchanged, Decimal becomes
    float, date/time/timedelta become strings, containers are JSON-dumped,
    and unhandled types are returned as-is with a DEBUG log entry."""

    def test_flatten_with_string(self):
        # Plain strings are returned untouched.
        self.assertEqual("hello", flatten("hello"))

    def test_flatten_with_integer(self):
        # Integers are returned untouched.
        self.assertEqual(10, flatten(10))

    def test_flatten_with_float(self):
        # Floats are returned untouched.
        self.assertEqual(10.5, flatten(10.5))

    def test_flatten_with_boolean(self):
        # Booleans are returned untouched.
        self.assertEqual(True, flatten(True))

    def test_flatten_with_decimal(self):
        # Decimal values are converted to float.
        self.assertEqual(10.5, flatten(decimal.Decimal("10.5")))

    def test_flatten_with_date(self):
        # date objects become their ISO string form.
        sample = datetime.date(2021, 1, 1)
        self.assertEqual("2021-01-01", flatten(sample))

    def test_flatten_with_time(self):
        # time objects become their string form.
        sample = datetime.time(12, 30)
        self.assertEqual("12:30:00", flatten(sample))

    def test_flatten_with_datetime(self):
        # datetime objects become their string form.
        sample = datetime.datetime(2021, 1, 1, 12, 30)
        self.assertEqual("2021-01-01 12:30:00", flatten(sample))

    def test_flatten_with_timedelta(self):
        # timedelta objects become their human-readable string form.
        sample = datetime.timedelta(days=2)
        self.assertEqual("2 days, 0:00:00", flatten(sample))

    def test_flatten_with_list(self):
        # Lists are serialized to a JSON string.
        self.assertEqual("[1, 2, 3]", flatten([1, 2, 3]))

    def test_flatten_with_dictionary(self):
        # Dicts are serialized to a JSON string.
        self.assertEqual('{"key": "value"}', flatten({"key": "value"}))

    def test_flatten_with_none(self):
        # None is returned untouched.
        self.assertEqual(None, flatten(None))

    @pytest.fixture(autouse=True)
    def inject_fixtures(self, caplog):
        # Bridge pytest's caplog fixture into this unittest-style class so
        # the log-capture test below can inspect emitted records.
        self._caplog = caplog

    def test_flatten_unhandled_type(self):
        class CustomType:
            pass

        unknown = CustomType()
        with self._caplog.at_level(logging.DEBUG):
            flattened = flatten(unknown)
        # The value is passed through unchanged...
        self.assertEqual(flattened, unknown)
        # ...and a DEBUG message naming the unhandled type is logged.
        expected = "flatten() found unhandled type: %s" % str(type(unknown))
        self.assertIn(expected, self._caplog.records[0].message)