Skip to content

Commit

Permalink
Adding order_desc flag to explore endpoint (#3439)
Browse files Browse the repository at this point in the history
  • Loading branch information
fabianmenges authored and mistercrunch committed Sep 12, 2017
1 parent 490c707 commit 3c0e85e
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 9 deletions.
13 changes: 7 additions & 6 deletions superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -812,7 +812,8 @@ def run_query( # noqa / druid
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None):
columns=None, phase=2, client=None, form_data=None,
order_desc=True):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
Expand Down Expand Up @@ -882,20 +883,20 @@ def run_query( # noqa / druid
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters

order_direction = "descending" if order_desc else "ascending"
orig_filters = filters
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1:
if not having_filters and len(groupby) == 1 and order_desc:
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
qry['dimension'] = list(qry.get('dimensions'))[0]
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters:
elif len(groupby) > 1 or having_filters or not order_desc:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
Expand All @@ -913,7 +914,7 @@ def run_query( # noqa / druid
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": "descending",
"direction": order_direction,
}],
}
client.groupby(**pre_qry)
Expand Down Expand Up @@ -956,7 +957,7 @@ def run_query( # noqa / druid
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": "descending",
"direction": order_direction,
}],
}
client.groupby(**qry)
Expand Down
9 changes: 6 additions & 3 deletions superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,8 @@ def get_sqla_query( # sqla
orderby=None,
extras=None,
columns=None,
form_data=None):
form_data=None,
order_desc=True):
"""Querying any sqla table from this common interface"""
template_kwargs = {
'from_dttm': from_dttm,
Expand Down Expand Up @@ -512,7 +513,8 @@ def get_sqla_query( # sqla
qry = qry.where(and_(*where_clause_and))
qry = qry.having(and_(*having_clause_and))
if groupby:
qry = qry.order_by(desc(main_metric_expr))
direction = desc if order_desc else asc
qry = qry.order_by(direction(main_metric_expr))
elif orderby:
for col, ascending in orderby:
direction = asc if ascending else desc
Expand All @@ -539,7 +541,8 @@ def get_sqla_query( # sqla
ob = inner_main_metric_expr
if timeseries_limit_metric_expr is not None:
ob = timeseries_limit_metric_expr
subq = subq.order_by(desc(ob))
direction = desc if order_desc else asc
subq = subq.order_by(direction(ob))
subq = subq.limit(timeseries_limit)
on_clause = []
for i, gb in enumerate(groupby):
Expand Down
4 changes: 4 additions & 0 deletions superset/viz.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,9 @@ def query_obj(self):
row_limit = int(
form_data.get("row_limit") or config.get("ROW_LIMIT"))

# default order direction
order_desc = form_data.get("order_desc", True)

# __from and __to are special extra_filters that target time
# boundaries. The rest of extra_filters are simple
# [column_name in list_of_values]. `__` prefix is there to avoid
Expand Down Expand Up @@ -194,6 +197,7 @@ def query_obj(self):
'extras': extras,
'timeseries_limit_metric': timeseries_limit_metric,
'form_data': form_data,
'order_desc': order_desc
}
return d

Expand Down

0 comments on commit 3c0e85e

Please sign in to comment.