Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Sync with upstream #1

Merged
merged 5 commits into from
Jan 29, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 2 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,8 @@ def get_git_sha():
'requests==2.17.3',
'simplejson==3.10.0',
'six==1.11.0',
'sqlalchemy==1.1.9',
'sqlalchemy-utils==0.32.16',
'sqlalchemy==1.2.2',
'sqlalchemy-utils==0.32.21',
'sqlparse==0.2.3',
'thrift>=0.9.3',
'thrift-sasl>=0.2.1',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class QueryAutoRefresh extends React.PureComponent {
const queryKeys = Object.keys(queries);
const queriesAsArray = queryKeys.map(key => queries[key]);
return queriesAsArray.some(q =>
['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0).length;
['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0);
}
startTimer() {
if (!(this.timer)) {
Expand Down
21 changes: 12 additions & 9 deletions superset/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,22 +136,25 @@ def load_examples(load_test_data):
print('Loading [Misc Charts] dashboard')
data.load_misc_dashboard()

print('Loading DECK.gl demo')
data.load_deck_dash()

print('Loading Paris geojson data')
print('Loading [Paris GeoJson]')
data.load_paris_iris_geojson()

if load_test_data:
print('Loading [Unicode test data]')
data.load_unicode_test_data()
print('Loading [San Francisco population polygons]')
data.load_sf_population_polygons()

print('Loading flights data')
print('Loading [Flights data]')
data.load_flights()

print('Loading bart lines data')
print('Loading [BART lines]')
data.load_bart_lines()

if load_test_data:
print('Loading [Unicode test data]')
data.load_unicode_test_data()

print('Loading DECK.gl demo')
data.load_deck_dash()


@manager.option(
'-d', '--datasource',
Expand Down
5 changes: 4 additions & 1 deletion superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,9 @@ class DruidCluster(Model, AuditMixinNullable, ImportMixin):
def __repr__(self):
    """Display name for the cluster: the verbose name when set, else the cluster name."""
    # `or` falls through to cluster_name for any falsy verbose_name
    # (None or empty string), matching the original conditional.
    return self.verbose_name or self.cluster_name

def __html__(self):
    """Expose the repr to template engines (Jinja/MarkupSafe `__html__` protocol)."""
    # repr(self) dispatches to this class's __repr__, same as self.__repr__().
    return repr(self)

@property
def data(self):
return {
Expand Down Expand Up @@ -1037,7 +1040,7 @@ def run_query( # noqa / druid
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
columns=None, phase=2, client=None, form_data=None,
columns=None, phase=2, client=None,
order_desc=True,
prequeries=None,
is_prequery=False,
Expand Down
3 changes: 0 additions & 3 deletions superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,6 @@ def get_sqla_query( # sqla
orderby=None,
extras=None,
columns=None,
form_data=None,
order_desc=True,
prequeries=None,
is_prequery=False,
Expand All @@ -458,7 +457,6 @@ def get_sqla_query( # sqla
'metrics': metrics,
'row_limit': row_limit,
'to_dttm': to_dttm,
'form_data': form_data,
}
template_processor = self.get_template_processor(**template_kwargs)
db_engine_spec = self.database.db_engine_spec
Expand Down Expand Up @@ -654,7 +652,6 @@ def get_sqla_query( # sqla
'orderby': orderby,
'extras': extras,
'columns': columns,
'form_data': form_data,
'order_desc': True,
}
result = self.query(subquery_obj)
Expand Down
230 changes: 219 additions & 11 deletions superset/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1433,6 +1433,194 @@ def load_deck_dash():
merge_slice(slc)
slices.append(slc)

polygon_tbl = db.session.query(TBL) \
.filter_by(table_name='sf_population_polygons').first()
slice_data = {
"datasource": "11__table",
"viz_type": "deck_polygon",
"slice_id": 41,
"granularity_sqla": None,
"time_grain_sqla": None,
"since": "7 days ago",
"until": "now",
"line_column": "contour",
"line_type": "json",
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"viewport": {
"longitude": -122.43388541747726,
"latitude": 37.752020331384834,
"zoom": 11.133995608594631,
"bearing": 37.89506450385642,
"pitch": 60,
"width": 667,
"height": 906,
"altitude": 1.5,
"maxZoom": 20,
"minZoom": 0,
"maxPitch": 60,
"minPitch": 0,
"maxLatitude": 85.05113,
"minLatitude": -85.05113
},
"reverse_long_lat": False,
"fill_color_picker": {
"r": 3,
"g": 65,
"b": 73,
"a": 1
},
"stroke_color_picker": {
"r": 0,
"g": 122,
"b": 135,
"a": 1
},
"filled": True,
"stroked": False,
"extruded": True,
"point_radius_scale": 100,
"js_columns": [
"population",
"area"
],
"js_datapoint_mutator": "(d) => {\n d.elevation = d.extraProps.population/d.extraProps.area/10\n \
d.fillColor = [d.extraProps.population/d.extraProps.area/60,140,0]\n \
return d;\n}",
"js_tooltip": "",
"js_onclick_href": "",
"where": "",
"having": "",
"filters": []
}

print("Creating Polygon slice")
slc = Slice(
slice_name="Polygons",
viz_type='deck_polygon',
datasource_type='table',
datasource_id=polygon_tbl.id,
params=get_slice_json(slice_data),
)
merge_slice(slc)
slices.append(slc)

slice_data = {
"datasource": "10__table",
"viz_type": "deck_arc",
"slice_id": 42,
"granularity_sqla": "date",
"time_grain_sqla": "Time Column",
"since": "2014-01-01",
"until": "now",
"start_spatial": {
"type": "latlong",
"latCol": "LATITUDE",
"lonCol": "LONGITUDE"
},
"end_spatial": {
"type": "latlong",
"latCol": "LATITUDE_DEST",
"lonCol": "LONGITUDE_DEST"
},
"row_limit": 5000,
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"viewport": {
"altitude": 1.5,
"bearing": 8.546256357301871,
"height": 642,
"latitude": 44.596651438714254,
"longitude": -91.84340711201104,
"maxLatitude": 85.05113,
"maxPitch": 60,
"maxZoom": 20,
"minLatitude": -85.05113,
"minPitch": 0,
"minZoom": 0,
"pitch": 60,
"width": 997,
"zoom": 2.929837070560775
},
"color_picker": {
"r": 0,
"g": 122,
"b": 135,
"a": 1
},
"stroke_width": 1,
"where": "",
"having": "",
"filters": []
}

print("Creating Arc slice")
slc = Slice(
slice_name="Arcs",
viz_type='deck_arc',
datasource_type='table',
datasource_id=db.session.query(TBL).filter_by(table_name='flights').first().id,
params=get_slice_json(slice_data),
)
merge_slice(slc)
slices.append(slc)

slice_data = {
"datasource": "12__table",
"slice_id": 43,
"viz_type": "deck_path",
"time_grain_sqla": "Time Column",
"since": "7 days ago",
"until": "now",
"line_column": "path_json",
"line_type": "json",
"row_limit": 5000,
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"viewport": {
"longitude": -122.18885402582598,
"latitude": 37.73671752604488,
"zoom": 9.51847667620428,
"bearing": 0,
"pitch": 0,
"width": 669,
"height": 1094,
"altitude": 1.5,
"maxZoom": 20,
"minZoom": 0,
"maxPitch": 60,
"minPitch": 0,
"maxLatitude": 85.05113,
"minLatitude": -85.05113
},
"color_picker": {
"r": 0,
"g": 122,
"b": 135,
"a": 1
},
"line_width": 150,
"reverse_long_lat": False,
"js_columns": [
"color"
],
"js_datapoint_mutator": "d => {\n return {\n ...d,\n color: \
colors.hexToRGB(d.extraProps.color),\n }\n}",
"js_tooltip": "",
"js_onclick_href": "",
"where": "",
"having": "",
"filters": []
}

print("Creating Path slice")
slc = Slice(
slice_name="Path",
viz_type='deck_path',
datasource_type='table',
datasource_id=db.session.query(TBL).filter_by(table_name='bart_lines').first().id,
params=get_slice_json(slice_data),
)
merge_slice(slc)
slices.append(slc)

print("Creating a dashboard")
title = "deck.gl Demo"
dash = db.session.query(Dash).filter_by(dashboard_title=title).first()
Expand Down Expand Up @@ -1468,6 +1656,27 @@ def load_deck_dash():
"size_x": 6,
"size_y": 4,
"slice_id": "40"
},
{
"col": 1,
"row": 4,
"size_x": 6,
"size_y": 4,
"slice_id": "41"
},
{
"col": 7,
"row": 4,
"size_x": 6,
"size_y": 4,
"slice_id": "42"
},
{
"col": 1,
"row": 5,
"size_x": 6,
"size_y": 4,
"slice_id": "43"
}
]
""")
Expand All @@ -1484,6 +1693,7 @@ def load_deck_dash():

def load_flights():
"""Loading random time series data from a zip file in the repo"""
tbl_name = 'flights'
with gzip.open(os.path.join(DATA_FOLDER, 'fligth_data.csv.gz')) as f:
pdf = pd.read_csv(f, encoding='latin-1')

Expand All @@ -1501,25 +1711,23 @@ def load_flights():
pdf = pdf.join(airports, on='ORIGIN_AIRPORT', rsuffix='_ORIG')
pdf = pdf.join(airports, on='DESTINATION_AIRPORT', rsuffix='_DEST')
pdf.to_sql(
'flights',
tbl_name,
db.engine,
if_exists='replace',
chunksize=500,
dtype={
'ds': DateTime,
},
index=False)
print("Done loading table!")

print("Creating table [random_time_series] reference")
obj = db.session.query(TBL).filter_by(table_name='random_time_series').first()
if not obj:
obj = TBL(table_name='flights')
obj.main_dttm_col = 'ds'
obj.database = get_or_create_main_db()
db.session.merge(obj)
tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
if not tbl:
tbl = TBL(table_name=tbl_name)
tbl.description = "Random set of flights in the US"
tbl.database = get_or_create_main_db()
db.session.merge(tbl)
db.session.commit()
obj.fetch_metadata()
tbl.fetch_metadata()
print("Done loading table!")


def load_paris_iris_geojson():
Expand Down