Skip to content

Commit

Permalink
Adding Druid Time Granularities (#5379)
Browse files Browse the repository at this point in the history
* Adding Druid Time Granularities

* fixed a linter error
  • Loading branch information
JamshedRahman authored and mistercrunch committed Jul 12, 2018
1 parent cd2414b commit cafde15
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 1 deletion.
2 changes: 2 additions & 0 deletions superset/assets/src/explore/controls.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -765,6 +765,8 @@ export const controls = {
['week_starting_sunday', 'week starting Sunday'],
['week_ending_saturday', 'week ending Saturday'],
['P1M', 'month'],
['P3M', 'quarter'],
['P1Y', 'year'],
],
description: t('The time granularity for the visualization. Note that you ' +
'can type and use simple natural language as in `10 seconds`, ' +
Expand Down
4 changes: 3 additions & 1 deletion superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,7 +536,7 @@ def time_column_grains(self):
'all', '5 seconds', '30 seconds', '1 minute', '5 minutes'
'30 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
'month', 'quarter', 'year',
],
'time_grains': ['now'],
}
Expand Down Expand Up @@ -744,6 +744,8 @@ def granularity(period_name, timezone=None, origin=None):
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
'quarter': 'P3M',
'year': 'P1Y',
}

granularity = {'type': 'period'}
Expand Down
56 changes: 56 additions & 0 deletions tests/druid_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -460,6 +460,62 @@ def test_urls(self):
cluster.get_base_broker_url(),
'http://localhost:7980/druid/v2')

@patch('superset.connectors.druid.models.PyDruid')
def test_druid_time_granularities(self, PyDruid):
    """Verify each named time granularity maps to its ISO-8601 Druid period.

    Issues an explore_json request per granularity name and checks the
    `period` passed to the mocked PyDruid timeseries call.
    """
    self.login(username='admin')
    cluster = self.get_cluster(PyDruid)
    cluster.refresh_datasources()
    cluster.refresh_datasources(merge_flag=True)
    datasource_id = cluster.datasources[0].id
    db.session.commit()

    import pandas as pd
    # Flatten each mocked groupby row into one flat record, folding the
    # row's timestamp in alongside its event fields.
    records = [
        dict(row['event'], timestamp=row['timestamp'])
        for row in GB_RESULT_SET
    ]
    df = pd.DataFrame(records)
    instance = PyDruid.return_value
    instance.export_pandas.return_value = df
    instance.query_dict = {}
    instance.query_builder.last_query.query_dict = {}

    form_data = {
        'viz_type': 'table',
        'since': '7+days+ago',
        'until': 'now',
        'metrics': ['count'],
        'groupby': [],
        'include_time': 'true',
    }

    # Human-readable granularity name -> expected ISO-8601 period string.
    granularity_map = {
        '5 seconds': 'PT5S',
        '30 seconds': 'PT30S',
        '1 minute': 'PT1M',
        '5 minutes': 'PT5M',
        '1 hour': 'PT1H',
        '6 hour': 'PT6H',
        'one day': 'P1D',
        '1 day': 'P1D',
        '7 days': 'P7D',
        'week': 'P1W',
        'week_starting_sunday': 'P1W',
        'week_ending_saturday': 'P1W',
        'month': 'P1M',
        'quarter': 'P3M',
        'year': 'P1Y',
    }
    url = '/superset/explore_json/druid/{}/'.format(datasource_id)

    for name, expected_period in granularity_map.items():
        form_data['granularity'] = name
        self.get_json_resp(url, {'form_data': json.dumps(form_data)})
        self.assertEqual(
            expected_period,
            instance.timeseries.call_args[1]['granularity']['period'],
        )


# Allow running this test module directly (e.g. `python druid_tests.py`)
# in addition to collection by a test runner.
if __name__ == '__main__':
    unittest.main()

0 comments on commit cafde15

Please sign in to comment.